In [1]:
# Paths and gesture file-name prefixes for the sEMG recordings.
# Each recording is stored as '<prefix><index>.txt', e.g. './training_data/fist_3.txt'.
TrainingData_PATH = './training_data/'
TestData_PATH = './test_data/'
gesture1 = 'doubletap_'
gesture2 = 'fist_'
gesture3 = 'spread_'
gesture4 = 'static_'
gesture5 = 'wavein_'
gesture6 = 'waveout_'
txt_format = '.txt'
In [2]:
def rescaling_max_min(input_list):
    """Min-max rescaling of a 1-D sequence into [0, 1].

    https://www.zhihu.com/question/20467170

    min(input_list) maps to 0 and max(input_list) maps to 1.
    Robustness fix: a constant (zero-range) input now maps to all zeros
    instead of raising ZeroDivisionError.
    """
    max_value = max(input_list)
    min_value = min(input_list)
    value_range = max_value - min_value
    if value_range == 0:
        # Degenerate case: every element is identical.
        return [0.0 for _ in input_list]
    return [(x - min_value) / value_range for x in input_list]
In [3]:
def mean_normalization(input_list):
    """Mean normalization: (x - mean) / (max - min).

    https://www.zhihu.com/question/20467170

    Robustness fix: a constant (zero-range) input now maps to all zeros
    instead of raising ZeroDivisionError.
    """
    import numpy as np
    max_value = max(input_list)
    min_value = min(input_list)
    mean_value = np.mean(input_list)
    value_range = max_value - min_value
    if value_range == 0:
        # Degenerate case: every element is identical.
        return [0.0 for _ in input_list]
    return [(x - mean_value) / value_range for x in input_list]
In [4]:
def quantum_rescaling(input_list):
    """Scale each element by the sum of squares of the whole list.

    Inspired by quantum-mechanics wavefunction normalization.
    NOTE(review): a true wavefunction normalization divides by the
    *square root* of the sum of squares (the L2 norm); this divides by
    the sum itself — confirm that this is the intended scaling.
    """
    import numpy as np
    norm = np.sum([item * item for item in input_list])
    return [item / norm for item in input_list]
In [5]:
def rescaled_2D(input_list):
    """Min-max rescale a 2-D list of lists into [0, 1] using the global extrema.

    Bug fix: the original used max(max(input_list)) / min(min(input_list)).
    max() over a list of lists compares the *rows* lexicographically, so it can
    select the wrong row and return a value that is not the global extremum
    (e.g. max(max([[1, 100], [2, 3]])) == 3, not 100).
    Robustness fix: a constant matrix maps to all zeros instead of raising
    ZeroDivisionError.
    """
    max_value = max(max(row) for row in input_list)
    min_value = min(min(row) for row in input_list)
    value_range = max_value - min_value
    if value_range == 0:
        return [[0.0 for _ in row] for row in input_list]
    return [[(cell - min_value) / value_range for cell in row]
            for row in input_list]
In [6]:
def MAV_cal(input_file):
    """Mean Absolute Value (MAV) per channel.

    Reads a whitespace-separated text file (one sample per line, one column
    per channel — 8 channels in this data set) and returns a list with the
    mean of |value| for each channel.

    Improvement: the original opened and read the file three times (once for
    the channel count, once for the line count, once for the values) and
    divided every term by the line count inside the loop; this version does a
    single pass, sums first and divides once at the end.
    """
    import math
    totals = None
    number_of_lines = 0
    with open(input_file) as file_var:
        for line in file_var:
            values = line.split()
            if totals is None:
                # Channel count taken from the first line (8 in this data set).
                totals = [0.0] * len(values)
            for channel_iter, value in enumerate(values):
                totals[channel_iter] += math.fabs(float(value))
            number_of_lines += 1
    return [total / number_of_lines for total in totals]
In [7]:
def RMS_cal(input_file):
    """Root Mean Square (RMS) per channel.

    Returns sqrt(sum(x^2) / N) for each channel of the whitespace-separated
    input file (one sample per line, one column per channel).

    Improvement: single pass over the file instead of the original three
    separate reads (channel count, line count, values).
    """
    import math
    square_sum = None
    number_of_lines = 0
    with open(input_file) as file_var:
        for line in file_var:
            values = line.split()
            if square_sum is None:
                # Channel count taken from the first line (8 in this data set).
                square_sum = [0.0] * len(values)
            for channel_iter, value in enumerate(values):
                square_sum[channel_iter] += float(value) ** 2
            number_of_lines += 1
    return [math.sqrt(s / number_of_lines) for s in square_sum]
In [8]:
def ZC_cal(input_file):
    """Zero Crossings (ZC) per channel.

    Counts, for each channel, how many consecutive sample pairs change sign.
    A pair counts only when the product of the two values is strictly
    negative, so samples that are exactly zero never contribute — identical
    to the original's `-current * previous > 0` test, just written directly.

    Improvement: single pass over the file instead of three separate reads,
    and the double-negated sign test is simplified.
    """
    with open(input_file) as file_var:
        # Seed the comparison with the first sample of every channel.
        previous = [float(v) for v in file_var.readline().split()]
        ZC_list = [0] * len(previous)
        for line in file_var:
            current = [float(v) for v in line.split()]
            for channel_iter in range(len(ZC_list)):
                if current[channel_iter] * previous[channel_iter] < 0:
                    ZC_list[channel_iter] += 1
            previous = current
    return ZC_list
In [9]:
def SSC_cal(input_file):
    """Slope Sign Changes (SSC) per channel.

    Counts, for each channel, how many interior samples are strict local
    extrema: (x[k] - x[k-1]) * (x[k] - x[k+1]) > 0.

    Improvement: single pass over the file instead of the original three
    separate reads (channel count, line count, values).
    """
    with open(input_file) as file_var:
        # Seed with the first two samples of every channel.
        prev_prev = [float(v) for v in file_var.readline().split()]
        prev = [float(v) for v in file_var.readline().split()]
        SSC_list = [0] * len(prev_prev)
        for line in file_var:
            current = [float(v) for v in line.split()]
            for channel_iter in range(len(SSC_list)):
                slope_product = ((prev[channel_iter] - prev_prev[channel_iter])
                                 * (prev[channel_iter] - current[channel_iter]))
                if slope_product > 0:
                    SSC_list[channel_iter] += 1
            prev_prev, prev = prev, current
    return SSC_list
In [10]:
def WL_cal(input_file):
    """Waveform Length (WL) per channel: sum of |x[k] - x[k-1]|.

    Improvement: single pass over the file instead of the original three
    separate reads (channel count, line count, values).
    """
    import math
    with open(input_file) as file_var:
        # Seed the comparison with the first sample of every channel.
        previous = [float(v) for v in file_var.readline().split()]
        WL_list = [0.0] * len(previous)
        for line in file_var:
            current = [float(v) for v in line.split()]
            for channel_iter in range(len(WL_list)):
                WL_list[channel_iter] += math.fabs(current[channel_iter]
                                                   - previous[channel_iter])
            previous = current
    return WL_list
In [11]:
def WA_cal(epsilon_var, input_file):
    """Willison Amplitude (WA) per channel.

    Counts, per channel, consecutive sample pairs whose absolute difference
    exceeds the threshold ``epsilon_var`` (2.5 in the original usage).

    Improvement: single pass over the file instead of the original three
    separate reads (channel count, line count, values).
    """
    import math
    with open(input_file) as file_var:
        # Seed the comparison with the first sample of every channel.
        previous = [float(v) for v in file_var.readline().split()]
        WA_list = [0] * len(previous)
        for line in file_var:
            current = [float(v) for v in line.split()]
            for channel_iter in range(len(WA_list)):
                if math.fabs(current[channel_iter]
                             - previous[channel_iter]) > epsilon_var:
                    WA_list[channel_iter] += 1
            previous = current
    return WA_list
In [12]:
def VAR_cal(input_file):
    """'Variance' per channel: sum(x^2) / (N - 1).

    NOTE(review): the mean is never subtracted, so this is the sample
    variance only under a zero-mean assumption (typical for raw sEMG) —
    confirm that assumption holds for this data.

    Improvement: single pass over the file instead of the original three
    separate reads (channel count, line count, values).
    """
    square_sum = None
    number_of_lines = 0
    with open(input_file) as file_var:
        for line in file_var:
            values = line.split()
            if square_sum is None:
                # Channel count taken from the first line (8 in this data set).
                square_sum = [0.0] * len(values)
            for channel_iter, value in enumerate(values):
                square_sum[channel_iter] += float(value) ** 2
            number_of_lines += 1
    # N - 1 denominator as in the original (raises ZeroDivisionError for a
    # single-line file, same as before).
    return [s / (number_of_lines - 1) for s in square_sum]
In [13]:
def LogD_cal(input_file):
    """Log Detector (LogD) per channel: exp(mean(log|x|)).

    This is the geometric mean of the absolute sample values. Magnitudes
    below ``resolution_size`` are clamped up to it so log() never sees zero.

    Improvement: single pass over the file instead of the original three
    separate reads, summing the logs first and dividing once at the end.
    """
    import math
    resolution_size = 0.01  # magnitude floor; avoids math.log(0)
    log_sum = None
    number_of_lines = 0
    with open(input_file) as file_var:
        for line in file_var:
            values = line.split()
            if log_sum is None:
                # Channel count taken from the first line (8 in this data set).
                log_sum = [0.0] * len(values)
            for channel_iter, value in enumerate(values):
                magnitude = max(math.fabs(float(value)), resolution_size)
                log_sum[channel_iter] += math.log(magnitude)
            number_of_lines += 1
    return [math.exp(s / number_of_lines) for s in log_sum]
In [14]:
def ARC_cal(input_file):
    #Auto Regression Coefficient
    #https://machinelearningmastery.com/autoregression-models-time-series-forecasting-python/
    # Fits an autoregressive model per channel and returns
    # (list of lag orders, list of coefficient lists), one entry per channel.
    # NOTE(review): statsmodels.tsa.ar_model.AR is deprecated and was removed
    # in newer statsmodels releases; the replacement is
    # statsmodels.tsa.ar_model.AutoReg — this cell will break on upgrade.
    import pandas as pd
    from pandas import Series
    from matplotlib import pyplot
    from statsmodels.tsa.ar_model import AR #conda install statsmodels
    from sklearn.metrics import mean_squared_error #pip install -U scikit-learn scipy matplotlib #conda installe scikit-learn
    import numpy as np
    
    with open(input_file) as file_var:
        for line in file_var:
            channel_numbers =len(line.split())#actually 8 as already known
            break
    #print("Channel number:", channel_numbers)
    '''
    number_of_lines = 0    
    
    with open(input_file) as file_var:
        for line in file_var:
            number_of_lines += 1
    #print("Line number:", number_of_lines)
    '''
    
    ARC_list = []
    ARC_list_LagNumber = []
    
    '''
    for column_n in range(channel_numbers):
        ARC_list.append([])
    '''
    
    # The file is whitespace-separated, so read_csv(header=None) yields one
    # string column; each row is split again below to recover the channels.
    input_series = pd.read_csv(input_file, header=None)
    input_tmp_file = input_series.values
    number_of_lines = len(input_tmp_file)
    input_file_array = [ [ float(ele) for ele in input_tmp_file[line_n][0].split() ] for line_n in range(number_of_lines)]
    input_file_array = np.array(input_file_array)
    for column_n in range(channel_numbers):
        # k_ar is the lag order chosen by AR's default selection.
        # NOTE(review): model_fit.params presumably includes the fitted
        # intercept as its first element, so each coefficient list has
        # k_ar + 1 entries — verify before treating params[0] as a lag
        # coefficient (Ceps_cal below consumes these values).
        model_fit = AR(input_file_array[:,column_n]).fit()
        ARC_list_LagNumber.append(model_fit.k_ar)
        ARC_list.append(model_fit.params.tolist())

    return ARC_list_LagNumber, ARC_list
In [15]:
def ARC_results_plot(Input_LagList, Input_ARC_list, gesture_name, i_label):
    """3-D surface plot of per-channel AR coefficients.

    X axis: lag index (up to the smallest lag order across channels),
    Y axis: channel index, Z: coefficient value.
    Returns the matplotlib Figure so the caller may save it.

    NOTE(review): Z_data uses the full coefficient lists while the meshgrid
    is sized from min(Input_LagList) + 1; this only lines up when every
    channel was fitted with the same lag order — confirm for this data.
    """
    from mpl_toolkits.mplot3d import Axes3D  # registers the '3d' projection
    import matplotlib.pyplot as plt
    from matplotlib import cm
    from matplotlib.ticker import LinearLocator, FormatStrFormatter
    import numpy as np

    X_data = np.arange(min(Input_LagList)+1)
    Y_data = np.arange(len(Input_ARC_list))
    X_data, Y_data = np.meshgrid(X_data, Y_data)
    Z_data = np.array(Input_ARC_list)

    fig = plt.figure()
    # Bug fix: fig.gca(projection='3d') was deprecated in matplotlib 3.4 and
    # removed in 3.6; add_subplot is the supported way to create 3-D axes.
    ax = fig.add_subplot(projection='3d')

    # Plot the surface.
    surf = ax.plot_surface(X_data, Y_data, Z_data, cmap=cm.coolwarm,
                       linewidth=0, antialiased=False)

    # Customize the z axis.
    ax.set_zlim(-1.01, 1.01)
    ax.zaxis.set_major_locator(LinearLocator(10))
    ax.zaxis.set_major_formatter(FormatStrFormatter('%.02f'))

    # Add a color bar which maps values to colors.
    fig.colorbar(surf, shrink=0.5, aspect=5)

    plt.title(gesture_name+"ARC"+i_label)
    plt.show()
    return fig
In [16]:
def Ceps_cal(input_file):
    #Cepstrum coefficients
    # Converts each channel's AR coefficients into cepstral coefficients via
    # the standard LPC-to-cepstrum recursion:
    #   c_1 = -a_1
    #   c_p = -a_p - sum_{j<p} ((p-j)/(p+1)) * c_j * a_{p-j}
    # Returns one list of cepstral coefficients per channel.
    # NOTE(review): ARC_cal returns statsmodels AR params, whose first
    # element is presumably the fitted intercept rather than a lag
    # coefficient — if so, the recursion below is misaligned by one index;
    # verify against ARC_cal's output before relying on these features.
    LagList,ARC_List = ARC_cal(input_file)
    CepsList = []
    for channel_iter in range(len(ARC_List)):
        # Seed each channel with c_1 = -a_1.
        CepsList.append([-ARC_List[channel_iter][0]])
        
    for channel_iter in range(len(ARC_List)):
        # One coefficient per lag (plus the seed above).
        item_size = LagList[channel_iter]+1
        for lag_iter in range(1, item_size):
            c_tmp = -ARC_List[channel_iter][lag_iter]
            for j_iter in range(0,lag_iter):
                c_tmp -= (lag_iter-j_iter)/(lag_iter+1)*CepsList[channel_iter][j_iter]*ARC_List[channel_iter][lag_iter-j_iter-1]
            CepsList[channel_iter].append(c_tmp)
    
    return CepsList
In [17]:
def Ceps_results_plot(Input_CepsList, gesture_name, i_label):
    """3-D surface plot of per-channel cepstral coefficients.

    X axis: coefficient index, Y axis: channel index, Z: coefficient value.
    Returns the matplotlib Figure so the caller may save it.
    """
    from mpl_toolkits.mplot3d import Axes3D  # registers the '3d' projection
    import matplotlib.pyplot as plt
    from matplotlib import cm
    from matplotlib.ticker import LinearLocator, FormatStrFormatter
    import numpy as np

    X_data = np.arange(len(Input_CepsList[0]))
    Y_data = np.arange(len(Input_CepsList))
    X_data, Y_data = np.meshgrid(X_data, Y_data)
    Z_data = np.array(Input_CepsList)

    fig = plt.figure()
    # Bug fix: fig.gca(projection='3d') was deprecated in matplotlib 3.4 and
    # removed in 3.6; add_subplot is the supported way to create 3-D axes.
    ax = fig.add_subplot(projection='3d')

    # Plot the surface.
    surf = ax.plot_surface(X_data, Y_data, Z_data, cmap=cm.coolwarm,
                       linewidth=0, antialiased=False)

    # Customize the z axis.
    #ax.set_zlim(-1.01, 1.01)
    ax.zaxis.set_major_locator(LinearLocator(10))
    ax.zaxis.set_major_formatter(FormatStrFormatter('%.02f'))

    # Add a color bar which maps values to colors.
    fig.colorbar(surf, shrink=0.5, aspect=5)

    plt.title(gesture_name+"Ceps"+i_label)
    plt.show()
    return fig
In [18]:
def fMD_cal(input_file):
    #Median frequency
    # TODO: not implemented — placeholder that always returns 0.
    return 0
In [19]:
def fME_cal(input_file):
    #Mean frequency
    # TODO: not implemented — placeholder that always returns 0.
    return 0
In [20]:
def emgHist(input_file):
    #sEMG Histogram
    # TODO: not implemented — placeholder that always returns 0.
    return 0
In [21]:
def list_plot_mean_normalized(input_list, label_name, fig, ax):
    """Plot the mean-normalized feature list on the supplied axes.

    https://matplotlib.org/3.1.0/gallery/lines_bars_and_markers/simple_plot.html#sphx-glr-gallery-lines-bars-and-markers-simple-plot-py

    x is the channel index, y is mean_normalization(input_list).
    Returns the supplied Figure so the caller may save it.

    Improvement: removed the unused `import matplotlib` and
    `import matplotlib.pyplot` — only numpy is actually used here.
    """
    import numpy as np
    x_channel = np.arange(len(input_list))
    y_signal_feature = mean_normalization(input_list)
    ax.plot(x_channel, y_signal_feature, label=label_name)
    ax.legend()

    return fig
In [22]:
def list_plot_rescaled_max_min(input_list, label_name, fig, ax):
    """Plot the min-max-rescaled feature list on the supplied axes.

    https://matplotlib.org/3.1.0/gallery/lines_bars_and_markers/simple_plot.html#sphx-glr-gallery-lines-bars-and-markers-simple-plot-py

    x is the channel index, y is rescaling_max_min(input_list).
    Returns the supplied Figure so the caller may save it.

    Improvement: removed the unused `import matplotlib` and
    `import matplotlib.pyplot` — only numpy is actually used here.
    """
    import numpy as np
    x_channel = np.arange(len(input_list))
    y_signal_feature = rescaling_max_min(input_list)
    ax.plot(x_channel, y_signal_feature, label=label_name)
    ax.legend()

    return fig
In [31]:
#import matplotlib.pyplot as plt#Step1
import numpy as np
import matplotlib.pyplot as plt  # hoisted: was re-imported inside the inner loop

number_of_gestures = 6
NumberPerGesture = 42

# File-name prefixes defined in the first cell; indexing a list replaces the
# original fragile locals()["gesture<N>"] lookup.
gesture_prefixes = [gesture1, gesture2, gesture3, gesture4, gesture5, gesture6]

training_images = []
training_labels = []

training_ARC_images = np.empty((0,8,8), int)
training_ARC_labels = []

training_Ceps_images = np.empty((0,8,8), int)
training_Ceps_labels = []

for gesture_iter in range(number_of_gestures):

    gesture = gesture_prefixes[gesture_iter]

    #fig, ax = plt.subplots()Step2

    for i in range(NumberPerGesture):

        feature_list = []
        ARC_list_inverted = []
        Ceps_list_inverted = []

        number_of_lines = 0  # NOTE(review): unused leftover, kept for compatibility
        i_label = i+1
        input_file = TrainingData_PATH+gesture+str(i_label)+txt_format

        # Eight scalar features per channel -> one 8x8 "image" per recording.
        feature_list.append(rescaling_max_min(MAV_cal(input_file)))#Line0
        #fig_MAV = list_plot_rescaled_max_min(MAV_cal(input_file),gesture+"MAV"+str(i_label), fig, ax)Step3
        feature_list.append(rescaling_max_min(RMS_cal(input_file)))#Line1
        feature_list.append(rescaling_max_min(ZC_cal(input_file)))#Line2
        feature_list.append(rescaling_max_min(SSC_cal(input_file)))#Line3
        feature_list.append(rescaling_max_min(WL_cal(input_file)))#Line4
        feature_list.append(rescaling_max_min(WA_cal(2.5,input_file)))#Line5
        feature_list.append(rescaling_max_min(VAR_cal(input_file)))#Line6
        feature_list.append(rescaling_max_min(LogD_cal(input_file)))#Line7

        training_images.append(feature_list)
        training_labels.append(gesture_iter+1)

        plt.imshow(feature_list)
        #print(feature_list)
        print(gesture+str(i_label)+" Row: %s, Column: %s" % (len(feature_list),len(feature_list[0])))


        # Transpose the AR-coefficient matrix so rows become coefficient index,
        # columns become channel.  (Comprehension variable renamed from `i` to
        # `col` — it shadowed the sample loop's `i`, which is confusing and
        # would be a real bug under Python 2 scoping.)
        LagList,ARC_List = ARC_cal(input_file)
        for item in [[row[col] for row in ARC_List] for col in range(len(ARC_List[0]))]:
            ARC_list_inverted.append(item)#Line8-19
        fig_tmp = ARC_results_plot(LagList, ARC_List, gesture, str(i_label))
        #fig_tmp.savefig(gesture+str(i_label)+"ARC"+".jpg")

        # Keep only the first 8 coefficient rows so every sample is 8x8.
        training_ARC_images = np.concatenate((training_ARC_images, [rescaled_2D(ARC_list_inverted)[:8]]), axis = 0)
        training_ARC_labels.append(gesture_iter+1)
        plt.imshow(rescaled_2D(ARC_list_inverted))


        Ceps_List = Ceps_cal(input_file)
        for item in [[row[col] for row in Ceps_List] for col in range(len(Ceps_List[0]))]:
            Ceps_list_inverted.append(item)#Line20-31
        fig_tmp = Ceps_results_plot(Ceps_list_inverted, gesture, str(i_label))
        #fig_tmp.savefig(gesture+str(i_label)+"Ceps"+".jpg")

        training_Ceps_images = np.concatenate((training_Ceps_images, [rescaled_2D(Ceps_list_inverted)[:8]]), axis = 0)
        training_Ceps_labels.append(gesture_iter+1)
        plt.imshow(rescaled_2D(Ceps_list_inverted))

    #plt.show()#Step4
    #fig_MAV.savefig(gesture+"MAV_Rescaled"+".jpg")#Step5#END
doubletap_1 Row: 8, Column: 8
doubletap_2 Row: 8, Column: 8
doubletap_3 Row: 8, Column: 8
doubletap_4 Row: 8, Column: 8
doubletap_5 Row: 8, Column: 8
doubletap_6 Row: 8, Column: 8
doubletap_7 Row: 8, Column: 8
doubletap_8 Row: 8, Column: 8
doubletap_9 Row: 8, Column: 8
doubletap_10 Row: 8, Column: 8
doubletap_11 Row: 8, Column: 8
doubletap_12 Row: 8, Column: 8
doubletap_13 Row: 8, Column: 8
doubletap_14 Row: 8, Column: 8
doubletap_15 Row: 8, Column: 8
doubletap_16 Row: 8, Column: 8
doubletap_17 Row: 8, Column: 8
doubletap_18 Row: 8, Column: 8
doubletap_19 Row: 8, Column: 8
doubletap_20 Row: 8, Column: 8
doubletap_21 Row: 8, Column: 8
doubletap_22 Row: 8, Column: 8
doubletap_23 Row: 8, Column: 8
doubletap_24 Row: 8, Column: 8
doubletap_25 Row: 8, Column: 8
doubletap_26 Row: 8, Column: 8
doubletap_27 Row: 8, Column: 8
doubletap_28 Row: 8, Column: 8
doubletap_29 Row: 8, Column: 8
doubletap_30 Row: 8, Column: 8
doubletap_31 Row: 8, Column: 8
doubletap_32 Row: 8, Column: 8
doubletap_33 Row: 8, Column: 8
doubletap_34 Row: 8, Column: 8
doubletap_35 Row: 8, Column: 8
doubletap_36 Row: 8, Column: 8
doubletap_37 Row: 8, Column: 8
doubletap_38 Row: 8, Column: 8
doubletap_39 Row: 8, Column: 8
doubletap_40 Row: 8, Column: 8
doubletap_41 Row: 8, Column: 8
doubletap_42 Row: 8, Column: 8
fist_1 Row: 8, Column: 8
fist_2 Row: 8, Column: 8
fist_3 Row: 8, Column: 8
fist_4 Row: 8, Column: 8
fist_5 Row: 8, Column: 8
fist_6 Row: 8, Column: 8
fist_7 Row: 8, Column: 8
fist_8 Row: 8, Column: 8
fist_9 Row: 8, Column: 8
fist_10 Row: 8, Column: 8
fist_11 Row: 8, Column: 8
fist_12 Row: 8, Column: 8
fist_13 Row: 8, Column: 8
fist_14 Row: 8, Column: 8
fist_15 Row: 8, Column: 8
fist_16 Row: 8, Column: 8
fist_17 Row: 8, Column: 8
fist_18 Row: 8, Column: 8
fist_19 Row: 8, Column: 8
fist_20 Row: 8, Column: 8
fist_21 Row: 8, Column: 8
fist_22 Row: 8, Column: 8
fist_23 Row: 8, Column: 8
fist_24 Row: 8, Column: 8
fist_25 Row: 8, Column: 8
fist_26 Row: 8, Column: 8
fist_27 Row: 8, Column: 8
fist_28 Row: 8, Column: 8
fist_29 Row: 8, Column: 8
fist_30 Row: 8, Column: 8
fist_31 Row: 8, Column: 8
fist_32 Row: 8, Column: 8
fist_33 Row: 8, Column: 8
fist_34 Row: 8, Column: 8
fist_35 Row: 8, Column: 8
fist_36 Row: 8, Column: 8
fist_37 Row: 8, Column: 8
fist_38 Row: 8, Column: 8
fist_39 Row: 8, Column: 8
fist_40 Row: 8, Column: 8
fist_41 Row: 8, Column: 8
fist_42 Row: 8, Column: 8
spread_1 Row: 8, Column: 8
spread_2 Row: 8, Column: 8
spread_3 Row: 8, Column: 8
spread_4 Row: 8, Column: 8
spread_5 Row: 8, Column: 8
spread_6 Row: 8, Column: 8
spread_7 Row: 8, Column: 8
spread_8 Row: 8, Column: 8
spread_9 Row: 8, Column: 8
spread_10 Row: 8, Column: 8
spread_11 Row: 8, Column: 8
spread_12 Row: 8, Column: 8
spread_13 Row: 8, Column: 8
spread_14 Row: 8, Column: 8
spread_15 Row: 8, Column: 8
spread_16 Row: 8, Column: 8
spread_17 Row: 8, Column: 8
spread_18 Row: 8, Column: 8
spread_19 Row: 8, Column: 8
spread_20 Row: 8, Column: 8
spread_21 Row: 8, Column: 8
spread_22 Row: 8, Column: 8
spread_23 Row: 8, Column: 8
spread_24 Row: 8, Column: 8
spread_25 Row: 8, Column: 8
spread_26 Row: 8, Column: 8
spread_27 Row: 8, Column: 8
spread_28 Row: 8, Column: 8
spread_29 Row: 8, Column: 8
spread_30 Row: 8, Column: 8
spread_31 Row: 8, Column: 8
spread_32 Row: 8, Column: 8
spread_33 Row: 8, Column: 8
spread_34 Row: 8, Column: 8
spread_35 Row: 8, Column: 8
spread_36 Row: 8, Column: 8
spread_37 Row: 8, Column: 8
spread_38 Row: 8, Column: 8
spread_39 Row: 8, Column: 8
spread_40 Row: 8, Column: 8
spread_41 Row: 8, Column: 8
spread_42 Row: 8, Column: 8
static_1 Row: 8, Column: 8
static_2 Row: 8, Column: 8
static_3 Row: 8, Column: 8
static_4 Row: 8, Column: 8
static_5 Row: 8, Column: 8
static_6 Row: 8, Column: 8
static_7 Row: 8, Column: 8
static_8 Row: 8, Column: 8
static_9 Row: 8, Column: 8
static_10 Row: 8, Column: 8
static_11 Row: 8, Column: 8
static_12 Row: 8, Column: 8
static_13 Row: 8, Column: 8
static_14 Row: 8, Column: 8
static_15 Row: 8, Column: 8
static_16 Row: 8, Column: 8
static_17 Row: 8, Column: 8
static_18 Row: 8, Column: 8
static_19 Row: 8, Column: 8
static_20 Row: 8, Column: 8
static_21 Row: 8, Column: 8
static_22 Row: 8, Column: 8
static_23 Row: 8, Column: 8
static_24 Row: 8, Column: 8
static_25 Row: 8, Column: 8
static_26 Row: 8, Column: 8
static_27 Row: 8, Column: 8
static_28 Row: 8, Column: 8
static_29 Row: 8, Column: 8
static_30 Row: 8, Column: 8
static_31 Row: 8, Column: 8
static_32 Row: 8, Column: 8
static_33 Row: 8, Column: 8
static_34 Row: 8, Column: 8
static_35 Row: 8, Column: 8
static_36 Row: 8, Column: 8
static_37 Row: 8, Column: 8
static_38 Row: 8, Column: 8
static_39 Row: 8, Column: 8
static_40 Row: 8, Column: 8
static_41 Row: 8, Column: 8
static_42 Row: 8, Column: 8
wavein_1 Row: 8, Column: 8
wavein_2 Row: 8, Column: 8
wavein_3 Row: 8, Column: 8
wavein_4 Row: 8, Column: 8
wavein_5 Row: 8, Column: 8
wavein_6 Row: 8, Column: 8
wavein_7 Row: 8, Column: 8
wavein_8 Row: 8, Column: 8
wavein_9 Row: 8, Column: 8
wavein_10 Row: 8, Column: 8
wavein_11 Row: 8, Column: 8
wavein_12 Row: 8, Column: 8
wavein_13 Row: 8, Column: 8
wavein_14 Row: 8, Column: 8
wavein_15 Row: 8, Column: 8
wavein_16 Row: 8, Column: 8
wavein_17 Row: 8, Column: 8
wavein_18 Row: 8, Column: 8
wavein_19 Row: 8, Column: 8
wavein_20 Row: 8, Column: 8
wavein_21 Row: 8, Column: 8
wavein_22 Row: 8, Column: 8
wavein_23 Row: 8, Column: 8
wavein_24 Row: 8, Column: 8
wavein_25 Row: 8, Column: 8
wavein_26 Row: 8, Column: 8
wavein_27 Row: 8, Column: 8
wavein_28 Row: 8, Column: 8
wavein_29 Row: 8, Column: 8
wavein_30 Row: 8, Column: 8
wavein_31 Row: 8, Column: 8
wavein_32 Row: 8, Column: 8
wavein_33 Row: 8, Column: 8
wavein_34 Row: 8, Column: 8
wavein_35 Row: 8, Column: 8
wavein_36 Row: 8, Column: 8
wavein_37 Row: 8, Column: 8
wavein_38 Row: 8, Column: 8
wavein_39 Row: 8, Column: 8
wavein_40 Row: 8, Column: 8
wavein_41 Row: 8, Column: 8
wavein_42 Row: 8, Column: 8
waveout_1 Row: 8, Column: 8
waveout_2 Row: 8, Column: 8
waveout_3 Row: 8, Column: 8
waveout_4 Row: 8, Column: 8
waveout_5 Row: 8, Column: 8
waveout_6 Row: 8, Column: 8
waveout_7 Row: 8, Column: 8
waveout_8 Row: 8, Column: 8
waveout_9 Row: 8, Column: 8
waveout_10 Row: 8, Column: 8
waveout_11 Row: 8, Column: 8
waveout_12 Row: 8, Column: 8
waveout_13 Row: 8, Column: 8
waveout_14 Row: 8, Column: 8
waveout_15 Row: 8, Column: 8
waveout_16 Row: 8, Column: 8
waveout_17 Row: 8, Column: 8
waveout_18 Row: 8, Column: 8
waveout_19 Row: 8, Column: 8
waveout_20 Row: 8, Column: 8
waveout_21 Row: 8, Column: 8
waveout_22 Row: 8, Column: 8
waveout_23 Row: 8, Column: 8
waveout_24 Row: 8, Column: 8
waveout_25 Row: 8, Column: 8
waveout_26 Row: 8, Column: 8
waveout_27 Row: 8, Column: 8
waveout_28 Row: 8, Column: 8
waveout_29 Row: 8, Column: 8
waveout_30 Row: 8, Column: 8
waveout_31 Row: 8, Column: 8
waveout_32 Row: 8, Column: 8
waveout_33 Row: 8, Column: 8
waveout_34 Row: 8, Column: 8
waveout_35 Row: 8, Column: 8
waveout_36 Row: 8, Column: 8
waveout_37 Row: 8, Column: 8
waveout_38 Row: 8, Column: 8
waveout_39 Row: 8, Column: 8
waveout_40 Row: 8, Column: 8
waveout_41 Row: 8, Column: 8
waveout_42 Row: 8, Column: 8

import matplotlib.pyplot as plt#Step1
import numpy as np

# NOTE(review): this cell's source had lost its formatting (multiple
# statements fused onto single lines, loop bodies dedented out of the `for`
# loops); it is reconstructed here to mirror the training-data cell above,
# reading from TestData_PATH and filling the test_* containers.
number_of_gestures = 3
NumberPerGesture = 7

test_images = []
test_labels = []

test_ARC_images = np.empty((0,10,8), int)
test_ARC_labels = []

test_Ceps_images = np.empty((0,10,8), int)
test_Ceps_labels = []

for gesture_iter in range(number_of_gestures):

    # Resolve the prefix variable gesture1/gesture2/... defined in the first
    # cell (fragile locals() lookup kept from the original).
    gesture = "gesture"+str(gesture_iter+1)
    gesture = locals()[gesture]

    #fig, ax = plt.subplots()Step2

    for i in range(NumberPerGesture):

        feature_list = []
        ARC_list_inverted = []
        Ceps_list_inverted = []

        number_of_lines = 0
        i_label = i+1
        input_file = TestData_PATH+gesture+str(i_label)+txt_format

        feature_list.append(rescaling_max_min(MAV_cal(input_file)))#Line0
        #fig_MAV = list_plot_rescaled_max_min(MAV_cal(input_file),gesture+"MAV"+str(i_label), fig, ax)Step3
        feature_list.append(rescaling_max_min(RMS_cal(input_file)))#Line1
        feature_list.append(rescaling_max_min(ZC_cal(input_file)))#Line2
        feature_list.append(rescaling_max_min(SSC_cal(input_file)))#Line3
        feature_list.append(rescaling_max_min(WL_cal(input_file)))#Line4
        feature_list.append(rescaling_max_min(WA_cal(2.5,input_file)))#Line5
        feature_list.append(rescaling_max_min(VAR_cal(input_file)))#Line6
        feature_list.append(rescaling_max_min(LogD_cal(input_file)))#Line7

        test_images.append(feature_list)
        test_labels.append(gesture_iter+1)

        plt.imshow(feature_list)
        #print(feature_list)
        print(gesture+str(i_label)+" Row: %s, Column: %s" % (len(feature_list),len(feature_list[0])))


        LagList,ARC_List = ARC_cal(input_file)
        for item in [[row[col] for row in ARC_List] for col in range(len(ARC_List[0]))]:
            ARC_list_inverted.append(item)#Line8-19
        fig_tmp = ARC_results_plot(LagList, ARC_List, gesture, str(i_label))
        #fig_tmp.savefig(gesture+str(i_label)+"ARC"+".jpg")

        # Keep only the first 10 coefficient rows so every sample is 10x8.
        test_ARC_images = np.concatenate((test_ARC_images, [rescaled_2D(ARC_list_inverted)[:10]]), axis = 0)
        test_ARC_labels.append(gesture_iter+1)
        plt.imshow(rescaled_2D(ARC_list_inverted))


        Ceps_List = Ceps_cal(input_file)
        for item in [[row[col] for row in Ceps_List] for col in range(len(Ceps_List[0]))]:
            Ceps_list_inverted.append(item)#Line20-31
        fig_tmp = Ceps_results_plot(Ceps_list_inverted, gesture, str(i_label))
        #fig_tmp.savefig(gesture+str(i_label)+"Ceps"+".jpg")

        test_Ceps_images = np.concatenate((test_Ceps_images, [rescaled_2D(Ceps_list_inverted)[:10]]), axis = 0)
        test_Ceps_labels.append(gesture_iter+1)
        plt.imshow(rescaled_2D(Ceps_list_inverted))

    #plt.show()#Step4
    #fig_MAV.savefig(gesture+"MAV_Rescaled"+".jpg")#Step5#END
In [ ]:
 
In [32]:
import numpy as np

# Convert the accumulated Python lists into numpy arrays for Keras.
training_images = np.array(training_images)
training_labels = np.array(training_labels)
#test_images = np.array(test_images)
#test_labels = np.array(test_labels)

# NOTE(review): the *_images below are already ndarrays (built with
# np.concatenate), so those conversions are no-ops; only the label lists
# actually need converting.
training_ARC_images = np.array(training_ARC_images)
training_ARC_labels = np.array(training_ARC_labels)
#test_ARC_images = np.array(test_ARC_images)
#test_ARC_labels = np.array(test_ARC_labels)

training_Ceps_images = np.array(training_Ceps_images)
training_Ceps_labels = np.array(training_Ceps_labels)
#test_Ceps_images = np.array(test_Ceps_images)
#test_Ceps_labels = np.array(test_Ceps_labels)
In [ ]:
 
In [33]:
import random

# Random permutation of sample indices, applied uniformly to every
# representation in the next cell.
# NOTE(review): no seed is set, so the train/test split below differs
# on every run — consider random.seed(...) for reproducibility.
index_ran = list(range(len(training_images)))
random.shuffle(index_ran)
In [34]:
# Apply the same shuffled order to every array (numpy fancy indexing with a
# list of indices) so features, AR images, cepstrum images and their labels
# all stay aligned sample-for-sample.
training_images = training_images[index_ran]
training_labels = training_labels[index_ran]
training_ARC_images = training_ARC_images[index_ran]
training_ARC_labels = training_ARC_labels[index_ran]
training_Ceps_images = training_Ceps_images[index_ran]
training_Ceps_labels = training_Ceps_labels[index_ran]
In [35]:
# Hold out the last `test_number` shuffled samples as the test split; the
# same slice is used for all three representations, so a given recording is
# either entirely in train or entirely in test.
test_number = 24
test_images = training_images[-test_number:]
test_labels = training_labels[-test_number:]
training_images = training_images[:-test_number]
training_labels = training_labels[:-test_number]
test_ARC_images = training_ARC_images[-test_number:]
test_ARC_labels = training_ARC_labels[-test_number:]
training_ARC_images = training_ARC_images[:-test_number]
training_ARC_labels = training_ARC_labels[:-test_number]
test_Ceps_images = training_Ceps_images[-test_number:]
test_Ceps_labels = training_Ceps_labels[-test_number:]
training_Ceps_images = training_Ceps_images[:-test_number]
training_Ceps_labels = training_Ceps_labels[:-test_number]
In [36]:
# NOTE(review): everything here is already a numpy array after the slicing
# above, so these np.array() calls are no-ops (they do copy the data).
training_images = np.array(training_images)
training_labels = np.array(training_labels)
test_images = np.array(test_images)
test_labels = np.array(test_labels)

training_ARC_images = np.array(training_ARC_images)
training_ARC_labels = np.array(training_ARC_labels)
test_ARC_images = np.array(test_ARC_images)
test_ARC_labels = np.array(test_ARC_labels)

training_Ceps_images = np.array(training_Ceps_images)
training_Ceps_labels = np.array(training_Ceps_labels)
test_Ceps_images = np.array(test_Ceps_images)
test_Ceps_labels = np.array(test_Ceps_labels)
In [37]:
import tensorflow as tf
from tensorflow import keras

# Simple MLP classifier over the 8x8 feature "images".
# NOTE(review): labels are 1..6, so the 7-way softmax leaves class 0 unused
# (sparse_categorical_crossentropy only requires the output size to exceed
# the maximum label); shifting labels to 0..5 with 6 outputs would be cleaner.
model = keras.Sequential([
    keras.layers.Flatten(input_shape=(8,8)),
    keras.layers.Dense(10240, activation = tf.nn.relu),
    keras.layers.Dense(7, activation = tf.nn.softmax)
])

# NOTE(review): tf.train.AdamOptimizer is the TF1 API and does not exist in
# TF2 — use optimizer='adam' or tf.keras.optimizers.Adam() there.
model.compile(optimizer = tf.train.AdamOptimizer(), loss = 'sparse_categorical_crossentropy', metrics=['accuracy'])

# No validation data is passed, so the printed accuracy is training accuracy.
model.fit(training_images, training_labels, epochs = 500)
Epoch 1/500
228/228 [==============================] - 0s 869us/sample - loss: 1.4232 - acc: 0.4430
Epoch 2/500
228/228 [==============================] - 0s 249us/sample - loss: 0.6882 - acc: 0.7675
Epoch 3/500
228/228 [==============================] - 0s 253us/sample - loss: 0.4697 - acc: 0.8377
Epoch 4/500
228/228 [==============================] - 0s 254us/sample - loss: 0.3480 - acc: 0.8904
Epoch 5/500
228/228 [==============================] - 0s 250us/sample - loss: 0.2947 - acc: 0.9123
Epoch 6/500
228/228 [==============================] - 0s 250us/sample - loss: 0.2734 - acc: 0.8640
Epoch 7/500
228/228 [==============================] - 0s 251us/sample - loss: 0.2623 - acc: 0.9035
Epoch 8/500
228/228 [==============================] - 0s 250us/sample - loss: 0.1966 - acc: 0.9430
Epoch 9/500
228/228 [==============================] - 0s 253us/sample - loss: 0.1625 - acc: 0.9561
Epoch 10/500
228/228 [==============================] - 0s 255us/sample - loss: 0.1696 - acc: 0.9298
Epoch 11/500
228/228 [==============================] - 0s 256us/sample - loss: 0.1486 - acc: 0.9386
Epoch 12/500
228/228 [==============================] - 0s 252us/sample - loss: 0.1198 - acc: 0.9825
Epoch 13/500
228/228 [==============================] - 0s 252us/sample - loss: 0.0874 - acc: 0.9868
Epoch 14/500
228/228 [==============================] - 0s 254us/sample - loss: 0.0718 - acc: 1.0000
Epoch 15/500
228/228 [==============================] - 0s 252us/sample - loss: 0.0778 - acc: 0.9825
Epoch 16/500
228/228 [==============================] - 0s 259us/sample - loss: 0.0704 - acc: 0.9912
Epoch 17/500
228/228 [==============================] - 0s 256us/sample - loss: 0.0524 - acc: 0.9956
Epoch 18/500
228/228 [==============================] - 0s 251us/sample - loss: 0.0776 - acc: 0.9868
Epoch 19/500
228/228 [==============================] - 0s 254us/sample - loss: 0.0529 - acc: 0.9868
Epoch 20/500
228/228 [==============================] - 0s 254us/sample - loss: 0.0447 - acc: 1.0000
Epoch 21/500
228/228 [==============================] - 0s 253us/sample - loss: 0.0383 - acc: 1.0000
Epoch 22/500
228/228 [==============================] - 0s 248us/sample - loss: 0.0341 - acc: 1.0000
Epoch 23/500
228/228 [==============================] - 0s 259us/sample - loss: 0.0319 - acc: 1.0000
Epoch 24/500
228/228 [==============================] - 0s 259us/sample - loss: 0.0304 - acc: 1.0000
Epoch 25/500
228/228 [==============================] - 0s 269us/sample - loss: 0.0302 - acc: 1.0000
Epoch 26/500
228/228 [==============================] - 0s 264us/sample - loss: 0.0237 - acc: 1.0000
Epoch 27/500
228/228 [==============================] - 0s 266us/sample - loss: 0.0222 - acc: 1.0000
Epoch 28/500
228/228 [==============================] - 0s 264us/sample - loss: 0.0266 - acc: 1.0000
Epoch 29/500
228/228 [==============================] - 0s 260us/sample - loss: 0.0232 - acc: 1.0000
Epoch 30/500
228/228 [==============================] - 0s 258us/sample - loss: 0.0195 - acc: 1.0000
Epoch 31/500
228/228 [==============================] - 0s 265us/sample - loss: 0.0319 - acc: 0.9912
Epoch 32/500
228/228 [==============================] - 0s 268us/sample - loss: 0.0276 - acc: 1.0000
Epoch 33/500
228/228 [==============================] - 0s 275us/sample - loss: 0.0206 - acc: 0.9956
Epoch 34/500
228/228 [==============================] - 0s 298us/sample - loss: 0.0143 - acc: 1.0000
Epoch 35/500
228/228 [==============================] - 0s 276us/sample - loss: 0.0139 - acc: 1.0000
Epoch 36/500
228/228 [==============================] - 0s 275us/sample - loss: 0.0124 - acc: 1.0000
Epoch 37/500
228/228 [==============================] - 0s 263us/sample - loss: 0.0165 - acc: 1.0000
Epoch 38/500
228/228 [==============================] - 0s 269us/sample - loss: 0.0137 - acc: 1.0000
Epoch 39/500
228/228 [==============================] - 0s 272us/sample - loss: 0.0108 - acc: 1.0000
Epoch 40/500
228/228 [==============================] - 0s 260us/sample - loss: 0.0139 - acc: 1.0000
Epoch 41/500
228/228 [==============================] - 0s 256us/sample - loss: 0.0099 - acc: 1.0000
Epoch 42/500
228/228 [==============================] - 0s 263us/sample - loss: 0.0097 - acc: 1.0000
Epoch 43/500
228/228 [==============================] - 0s 259us/sample - loss: 0.0089 - acc: 1.0000
Epoch 44/500
228/228 [==============================] - 0s 253us/sample - loss: 0.0087 - acc: 1.0000
Epoch 45/500
228/228 [==============================] - 0s 255us/sample - loss: 0.0081 - acc: 1.0000
Epoch 46/500
228/228 [==============================] - 0s 251us/sample - loss: 0.0123 - acc: 1.0000
Epoch 47/500
228/228 [==============================] - 0s 248us/sample - loss: 0.0081 - acc: 1.0000
Epoch 48/500
228/228 [==============================] - 0s 251us/sample - loss: 0.0080 - acc: 1.0000
Epoch 49/500
228/228 [==============================] - 0s 266us/sample - loss: 0.0071 - acc: 1.0000
Epoch 50/500
228/228 [==============================] - 0s 259us/sample - loss: 0.0060 - acc: 1.0000
Epoch 51/500
228/228 [==============================] - 0s 253us/sample - loss: 0.0059 - acc: 1.0000
Epoch 52/500
228/228 [==============================] - 0s 251us/sample - loss: 0.0054 - acc: 1.0000
Epoch 53/500
228/228 [==============================] - 0s 252us/sample - loss: 0.0055 - acc: 1.0000
Epoch 54/500
228/228 [==============================] - 0s 258us/sample - loss: 0.0053 - acc: 1.0000
Epoch 55/500
228/228 [==============================] - 0s 256us/sample - loss: 0.0050 - acc: 1.0000
Epoch 56/500
228/228 [==============================] - 0s 256us/sample - loss: 0.0049 - acc: 1.0000
Epoch 57/500
228/228 [==============================] - 0s 258us/sample - loss: 0.0049 - acc: 1.0000
Epoch 58/500
228/228 [==============================] - 0s 262us/sample - loss: 0.0045 - acc: 1.0000
Epoch 59/500
228/228 [==============================] - 0s 256us/sample - loss: 0.0044 - acc: 1.0000
Epoch 60/500
228/228 [==============================] - 0s 263us/sample - loss: 0.0041 - acc: 1.0000
Epoch 61/500
228/228 [==============================] - 0s 251us/sample - loss: 0.0041 - acc: 1.0000
Epoch 62/500
228/228 [==============================] - 0s 259us/sample - loss: 0.0039 - acc: 1.0000
Epoch 63/500
228/228 [==============================] - 0s 253us/sample - loss: 0.0039 - acc: 1.0000
Epoch 64/500
228/228 [==============================] - 0s 244us/sample - loss: 0.0037 - acc: 1.0000
Epoch 65/500
228/228 [==============================] - 0s 247us/sample - loss: 0.0036 - acc: 1.0000
Epoch 66/500
228/228 [==============================] - 0s 261us/sample - loss: 0.0035 - acc: 1.0000
Epoch 67/500
228/228 [==============================] - 0s 264us/sample - loss: 0.0033 - acc: 1.0000
Epoch 68/500
228/228 [==============================] - 0s 255us/sample - loss: 0.0032 - acc: 1.0000
Epoch 69/500
228/228 [==============================] - 0s 258us/sample - loss: 0.0032 - acc: 1.0000
Epoch 70/500
228/228 [==============================] - 0s 254us/sample - loss: 0.0032 - acc: 1.0000
Epoch 71/500
228/228 [==============================] - 0s 248us/sample - loss: 0.0029 - acc: 1.0000
Epoch 72/500
228/228 [==============================] - 0s 250us/sample - loss: 0.0030 - acc: 1.0000
Epoch 73/500
228/228 [==============================] - 0s 251us/sample - loss: 0.0029 - acc: 1.0000
Epoch 74/500
228/228 [==============================] - 0s 272us/sample - loss: 0.0030 - acc: 1.0000
Epoch 75/500
228/228 [==============================] - 0s 269us/sample - loss: 0.0027 - acc: 1.0000
Epoch 76/500
228/228 [==============================] - 0s 257us/sample - loss: 0.0026 - acc: 1.0000
Epoch 77/500
228/228 [==============================] - 0s 257us/sample - loss: 0.0026 - acc: 1.0000
Epoch 78/500
228/228 [==============================] - 0s 269us/sample - loss: 0.0024 - acc: 1.0000
Epoch 79/500
228/228 [==============================] - 0s 263us/sample - loss: 0.0024 - acc: 1.0000
Epoch 80/500
228/228 [==============================] - 0s 266us/sample - loss: 0.0023 - acc: 1.0000
Epoch 81/500
228/228 [==============================] - 0s 257us/sample - loss: 0.0023 - acc: 1.0000
Epoch 82/500
228/228 [==============================] - 0s 260us/sample - loss: 0.0024 - acc: 1.0000
Epoch 83/500
228/228 [==============================] - 0s 257us/sample - loss: 0.0023 - acc: 1.0000
Epoch 84/500
228/228 [==============================] - 0s 248us/sample - loss: 0.0022 - acc: 1.0000
Epoch 85/500
228/228 [==============================] - 0s 243us/sample - loss: 0.0021 - acc: 1.0000
Epoch 86/500
228/228 [==============================] - 0s 244us/sample - loss: 0.0020 - acc: 1.0000
Epoch 87/500
228/228 [==============================] - 0s 246us/sample - loss: 0.0020 - acc: 1.0000
Epoch 88/500
228/228 [==============================] - 0s 242us/sample - loss: 0.0019 - acc: 1.0000
Epoch 89/500
228/228 [==============================] - 0s 248us/sample - loss: 0.0019 - acc: 1.0000
Epoch 90/500
228/228 [==============================] - 0s 263us/sample - loss: 0.0021 - acc: 1.0000
Epoch 91/500
228/228 [==============================] - 0s 252us/sample - loss: 0.0018 - acc: 1.0000
Epoch 92/500
228/228 [==============================] - 0s 245us/sample - loss: 0.0018 - acc: 1.0000
Epoch 93/500
228/228 [==============================] - 0s 252us/sample - loss: 0.0018 - acc: 1.0000
Epoch 94/500
228/228 [==============================] - 0s 247us/sample - loss: 0.0017 - acc: 1.0000
Epoch 95/500
228/228 [==============================] - 0s 253us/sample - loss: 0.0016 - acc: 1.0000
Epoch 96/500
228/228 [==============================] - 0s 250us/sample - loss: 0.0017 - acc: 1.0000
Epoch 97/500
228/228 [==============================] - 0s 245us/sample - loss: 0.0016 - acc: 1.0000
Epoch 98/500
228/228 [==============================] - 0s 250us/sample - loss: 0.0017 - acc: 1.0000
Epoch 99/500
228/228 [==============================] - 0s 251us/sample - loss: 0.0015 - acc: 1.0000
Epoch 100/500
228/228 [==============================] - 0s 267us/sample - loss: 0.0015 - acc: 1.0000
Epoch 101/500
228/228 [==============================] - 0s 269us/sample - loss: 0.0014 - acc: 1.0000
Epoch 102/500
228/228 [==============================] - 0s 264us/sample - loss: 0.0014 - acc: 1.0000
Epoch 103/500
228/228 [==============================] - 0s 261us/sample - loss: 0.0014 - acc: 1.0000
Epoch 104/500
228/228 [==============================] - 0s 255us/sample - loss: 0.0014 - acc: 1.0000
Epoch 105/500
228/228 [==============================] - 0s 259us/sample - loss: 0.0014 - acc: 1.0000
Epoch 106/500
228/228 [==============================] - 0s 261us/sample - loss: 0.0013 - acc: 1.0000
Epoch 107/500
228/228 [==============================] - 0s 278us/sample - loss: 0.0013 - acc: 1.0000
Epoch 108/500
228/228 [==============================] - 0s 263us/sample - loss: 0.0013 - acc: 1.0000
Epoch 109/500
228/228 [==============================] - 0s 259us/sample - loss: 0.0012 - acc: 1.0000
Epoch 110/500
228/228 [==============================] - 0s 256us/sample - loss: 0.0012 - acc: 1.0000
Epoch 111/500
228/228 [==============================] - 0s 259us/sample - loss: 0.0012 - acc: 1.0000
Epoch 112/500
228/228 [==============================] - 0s 263us/sample - loss: 0.0011 - acc: 1.0000
Epoch 113/500
228/228 [==============================] - 0s 268us/sample - loss: 0.0012 - acc: 1.0000
Epoch 114/500
228/228 [==============================] - 0s 255us/sample - loss: 0.0011 - acc: 1.0000
Epoch 115/500
228/228 [==============================] - 0s 248us/sample - loss: 0.0011 - acc: 1.0000
Epoch 116/500
228/228 [==============================] - 0s 247us/sample - loss: 0.0011 - acc: 1.0000
Epoch 117/500
228/228 [==============================] - 0s 252us/sample - loss: 0.0011 - acc: 1.0000
Epoch 118/500
228/228 [==============================] - 0s 254us/sample - loss: 0.0010 - acc: 1.0000
Epoch 119/500
228/228 [==============================] - 0s 245us/sample - loss: 0.0010 - acc: 1.0000
Epoch 120/500
228/228 [==============================] - 0s 244us/sample - loss: 0.0010 - acc: 1.0000
Epoch 121/500
228/228 [==============================] - 0s 253us/sample - loss: 0.0010 - acc: 1.0000
Epoch 122/500
228/228 [==============================] - 0s 267us/sample - loss: 9.7220e-04 - acc: 1.0000
Epoch 123/500
228/228 [==============================] - 0s 273us/sample - loss: 9.3866e-04 - acc: 1.0000
Epoch 124/500
228/228 [==============================] - 0s 263us/sample - loss: 9.2487e-04 - acc: 1.0000
Epoch 125/500
228/228 [==============================] - 0s 257us/sample - loss: 8.9384e-04 - acc: 1.0000
Epoch 126/500
228/228 [==============================] - 0s 249us/sample - loss: 8.9728e-04 - acc: 1.0000
Epoch 127/500
228/228 [==============================] - 0s 247us/sample - loss: 8.8036e-04 - acc: 1.0000
Epoch 128/500
228/228 [==============================] - 0s 246us/sample - loss: 8.4505e-04 - acc: 1.0000
Epoch 129/500
228/228 [==============================] - 0s 243us/sample - loss: 8.3812e-04 - acc: 1.0000
Epoch 130/500
228/228 [==============================] - 0s 251us/sample - loss: 8.1735e-04 - acc: 1.0000
Epoch 131/500
228/228 [==============================] - 0s 252us/sample - loss: 7.9991e-04 - acc: 1.0000
Epoch 132/500
228/228 [==============================] - 0s 262us/sample - loss: 8.1844e-04 - acc: 1.0000
Epoch 133/500
228/228 [==============================] - 0s 257us/sample - loss: 7.8049e-04 - acc: 1.0000
Epoch 134/500
228/228 [==============================] - 0s 262us/sample - loss: 7.6593e-04 - acc: 1.0000
Epoch 135/500
228/228 [==============================] - 0s 267us/sample - loss: 7.6150e-04 - acc: 1.0000
Epoch 136/500
228/228 [==============================] - 0s 248us/sample - loss: 7.5593e-04 - acc: 1.0000
Epoch 137/500
228/228 [==============================] - 0s 259us/sample - loss: 7.2850e-04 - acc: 1.0000
Epoch 138/500
228/228 [==============================] - 0s 258us/sample - loss: 7.2421e-04 - acc: 1.0000
Epoch 139/500
228/228 [==============================] - 0s 263us/sample - loss: 7.2319e-04 - acc: 1.0000
Epoch 140/500
228/228 [==============================] - 0s 260us/sample - loss: 7.0823e-04 - acc: 1.0000
Epoch 141/500
228/228 [==============================] - 0s 254us/sample - loss: 6.8147e-04 - acc: 1.0000
Epoch 142/500
228/228 [==============================] - 0s 252us/sample - loss: 6.6687e-04 - acc: 1.0000
Epoch 143/500
228/228 [==============================] - 0s 256us/sample - loss: 6.6993e-04 - acc: 1.0000
Epoch 144/500
228/228 [==============================] - 0s 258us/sample - loss: 6.5477e-04 - acc: 1.0000
Epoch 145/500
228/228 [==============================] - 0s 257us/sample - loss: 7.5597e-04 - acc: 1.0000
Epoch 146/500
228/228 [==============================] - 0s 260us/sample - loss: 6.4137e-04 - acc: 1.0000
Epoch 147/500
228/228 [==============================] - 0s 260us/sample - loss: 6.3759e-04 - acc: 1.0000
Epoch 148/500
228/228 [==============================] - 0s 255us/sample - loss: 6.3429e-04 - acc: 1.0000
Epoch 149/500
228/228 [==============================] - 0s 253us/sample - loss: 6.0948e-04 - acc: 1.0000
Epoch 150/500
228/228 [==============================] - 0s 255us/sample - loss: 6.6434e-04 - acc: 1.0000
Epoch 151/500
228/228 [==============================] - 0s 247us/sample - loss: 6.1224e-04 - acc: 1.0000
Epoch 152/500
228/228 [==============================] - 0s 245us/sample - loss: 5.8605e-04 - acc: 1.0000
Epoch 153/500
228/228 [==============================] - 0s 255us/sample - loss: 5.7731e-04 - acc: 1.0000
Epoch 154/500
228/228 [==============================] - 0s 259us/sample - loss: 5.4524e-04 - acc: 1.0000
Epoch 155/500
228/228 [==============================] - 0s 251us/sample - loss: 5.4586e-04 - acc: 1.0000
Epoch 156/500
228/228 [==============================] - 0s 249us/sample - loss: 5.3680e-04 - acc: 1.0000
Epoch 157/500
228/228 [==============================] - 0s 258us/sample - loss: 5.3437e-04 - acc: 1.0000
Epoch 158/500
228/228 [==============================] - 0s 266us/sample - loss: 5.2787e-04 - acc: 1.0000
Epoch 159/500
228/228 [==============================] - 0s 259us/sample - loss: 5.2479e-04 - acc: 1.0000
Epoch 160/500
228/228 [==============================] - 0s 248us/sample - loss: 5.1119e-04 - acc: 1.0000
Epoch 161/500
228/228 [==============================] - 0s 277us/sample - loss: 5.1259e-04 - acc: 1.0000
Epoch 162/500
228/228 [==============================] - 0s 258us/sample - loss: 5.0061e-04 - acc: 1.0000
Epoch 163/500
228/228 [==============================] - 0s 259us/sample - loss: 4.9198e-04 - acc: 1.0000
Epoch 164/500
228/228 [==============================] - 0s 273us/sample - loss: 4.8543e-04 - acc: 1.0000
Epoch 165/500
228/228 [==============================] - 0s 268us/sample - loss: 4.6681e-04 - acc: 1.0000
Epoch 166/500
228/228 [==============================] - 0s 266us/sample - loss: 4.7499e-04 - acc: 1.0000
Epoch 167/500
228/228 [==============================] - 0s 267us/sample - loss: 4.5973e-04 - acc: 1.0000
Epoch 168/500
228/228 [==============================] - 0s 266us/sample - loss: 4.5233e-04 - acc: 1.0000
Epoch 169/500
228/228 [==============================] - 0s 264us/sample - loss: 4.6907e-04 - acc: 1.0000
Epoch 170/500
228/228 [==============================] - 0s 261us/sample - loss: 4.5928e-04 - acc: 1.0000
Epoch 171/500
228/228 [==============================] - 0s 256us/sample - loss: 4.5506e-04 - acc: 1.0000
Epoch 172/500
228/228 [==============================] - 0s 261us/sample - loss: 4.4255e-04 - acc: 1.0000
Epoch 173/500
228/228 [==============================] - 0s 259us/sample - loss: 4.2255e-04 - acc: 1.0000
Epoch 174/500
228/228 [==============================] - 0s 261us/sample - loss: 4.1456e-04 - acc: 1.0000
Epoch 175/500
228/228 [==============================] - 0s 265us/sample - loss: 4.1780e-04 - acc: 1.0000
Epoch 176/500
228/228 [==============================] - 0s 263us/sample - loss: 4.1226e-04 - acc: 1.0000
Epoch 177/500
228/228 [==============================] - 0s 258us/sample - loss: 4.1386e-04 - acc: 1.0000
Epoch 178/500
228/228 [==============================] - 0s 263us/sample - loss: 4.1478e-04 - acc: 1.0000
Epoch 179/500
228/228 [==============================] - 0s 261us/sample - loss: 3.9518e-04 - acc: 1.0000
Epoch 180/500
228/228 [==============================] - 0s 265us/sample - loss: 3.9118e-04 - acc: 1.0000
Epoch 181/500
228/228 [==============================] - 0s 260us/sample - loss: 3.9698e-04 - acc: 1.0000
Epoch 182/500
228/228 [==============================] - 0s 261us/sample - loss: 4.0791e-04 - acc: 1.0000
Epoch 183/500
228/228 [==============================] - 0s 264us/sample - loss: 3.8537e-04 - acc: 1.0000
Epoch 184/500
228/228 [==============================] - 0s 254us/sample - loss: 3.6511e-04 - acc: 1.0000
Epoch 185/500
228/228 [==============================] - 0s 256us/sample - loss: 3.6581e-04 - acc: 1.0000
Epoch 186/500
228/228 [==============================] - 0s 270us/sample - loss: 3.5418e-04 - acc: 1.0000
Epoch 187/500
228/228 [==============================] - 0s 270us/sample - loss: 3.6168e-04 - acc: 1.0000
Epoch 188/500
228/228 [==============================] - 0s 273us/sample - loss: 3.5773e-04 - acc: 1.0000
Epoch 189/500
228/228 [==============================] - 0s 265us/sample - loss: 3.4485e-04 - acc: 1.0000
Epoch 190/500
228/228 [==============================] - 0s 251us/sample - loss: 3.4870e-04 - acc: 1.0000
Epoch 191/500
228/228 [==============================] - 0s 255us/sample - loss: 3.9561e-04 - acc: 1.0000
Epoch 192/500
228/228 [==============================] - 0s 265us/sample - loss: 3.3973e-04 - acc: 1.0000
Epoch 193/500
228/228 [==============================] - 0s 262us/sample - loss: 3.2330e-04 - acc: 1.0000
Epoch 194/500
228/228 [==============================] - 0s 258us/sample - loss: 3.3121e-04 - acc: 1.0000
Epoch 195/500
228/228 [==============================] - 0s 254us/sample - loss: 3.1553e-04 - acc: 1.0000
Epoch 196/500
228/228 [==============================] - 0s 262us/sample - loss: 3.0833e-04 - acc: 1.0000
Epoch 197/500
228/228 [==============================] - 0s 263us/sample - loss: 3.0667e-04 - acc: 1.0000
Epoch 198/500
228/228 [==============================] - 0s 261us/sample - loss: 3.0250e-04 - acc: 1.0000
Epoch 199/500
228/228 [==============================] - 0s 272us/sample - loss: 2.9672e-04 - acc: 1.0000
Epoch 200/500
228/228 [==============================] - 0s 265us/sample - loss: 2.9086e-04 - acc: 1.0000
Epoch 201/500
228/228 [==============================] - 0s 262us/sample - loss: 2.8694e-04 - acc: 1.0000
Epoch 202/500
228/228 [==============================] - 0s 260us/sample - loss: 2.9067e-04 - acc: 1.0000
Epoch 203/500
228/228 [==============================] - 0s 268us/sample - loss: 2.8274e-04 - acc: 1.0000
Epoch 204/500
228/228 [==============================] - 0s 253us/sample - loss: 2.7964e-04 - acc: 1.0000
Epoch 205/500
228/228 [==============================] - 0s 257us/sample - loss: 2.8207e-04 - acc: 1.0000
Epoch 206/500
228/228 [==============================] - 0s 249us/sample - loss: 2.9420e-04 - acc: 1.0000
Epoch 207/500
228/228 [==============================] - 0s 260us/sample - loss: 3.4686e-04 - acc: 1.0000
Epoch 208/500
228/228 [==============================] - 0s 263us/sample - loss: 4.7403e-04 - acc: 1.0000
Epoch 209/500
228/228 [==============================] - 0s 264us/sample - loss: 3.5980e-04 - acc: 1.0000
Epoch 210/500
228/228 [==============================] - 0s 261us/sample - loss: 3.0375e-04 - acc: 1.0000
Epoch 211/500
228/228 [==============================] - 0s 253us/sample - loss: 3.0189e-04 - acc: 1.0000
Epoch 212/500
228/228 [==============================] - 0s 267us/sample - loss: 2.6089e-04 - acc: 1.0000
Epoch 213/500
228/228 [==============================] - 0s 259us/sample - loss: 2.6479e-04 - acc: 1.0000
Epoch 214/500
228/228 [==============================] - 0s 266us/sample - loss: 2.5646e-04 - acc: 1.0000
Epoch 215/500
228/228 [==============================] - 0s 256us/sample - loss: 2.4380e-04 - acc: 1.0000
Epoch 216/500
228/228 [==============================] - 0s 251us/sample - loss: 2.3779e-04 - acc: 1.0000
Epoch 217/500
228/228 [==============================] - 0s 252us/sample - loss: 2.3951e-04 - acc: 1.0000
Epoch 218/500
228/228 [==============================] - 0s 271us/sample - loss: 2.6268e-04 - acc: 1.0000
Epoch 219/500
228/228 [==============================] - 0s 272us/sample - loss: 2.5557e-04 - acc: 1.0000
Epoch 220/500
228/228 [==============================] - 0s 282us/sample - loss: 2.3404e-04 - acc: 1.0000
Epoch 221/500
228/228 [==============================] - 0s 268us/sample - loss: 2.2445e-04 - acc: 1.0000
Epoch 222/500
228/228 [==============================] - 0s 255us/sample - loss: 2.2256e-04 - acc: 1.0000
Epoch 223/500
228/228 [==============================] - 0s 255us/sample - loss: 2.1797e-04 - acc: 1.0000
Epoch 224/500
228/228 [==============================] - 0s 262us/sample - loss: 2.1496e-04 - acc: 1.0000
Epoch 225/500
228/228 [==============================] - 0s 271us/sample - loss: 2.1562e-04 - acc: 1.0000
Epoch 226/500
228/228 [==============================] - 0s 260us/sample - loss: 2.1114e-04 - acc: 1.0000
Epoch 227/500
228/228 [==============================] - 0s 261us/sample - loss: 2.0771e-04 - acc: 1.0000
Epoch 228/500
228/228 [==============================] - 0s 253us/sample - loss: 2.0526e-04 - acc: 1.0000
Epoch 229/500
228/228 [==============================] - 0s 253us/sample - loss: 2.0329e-04 - acc: 1.0000
Epoch 230/500
228/228 [==============================] - 0s 265us/sample - loss: 2.0639e-04 - acc: 1.0000
Epoch 231/500
228/228 [==============================] - 0s 267us/sample - loss: 2.0100e-04 - acc: 1.0000
Epoch 232/500
228/228 [==============================] - 0s 252us/sample - loss: 1.9570e-04 - acc: 1.0000
Epoch 233/500
228/228 [==============================] - 0s 255us/sample - loss: 1.9673e-04 - acc: 1.0000
Epoch 234/500
228/228 [==============================] - 0s 255us/sample - loss: 1.9890e-04 - acc: 1.0000
Epoch 235/500
228/228 [==============================] - 0s 258us/sample - loss: 1.9124e-04 - acc: 1.0000
Epoch 236/500
228/228 [==============================] - 0s 263us/sample - loss: 1.9963e-04 - acc: 1.0000
Epoch 237/500
228/228 [==============================] - 0s 262us/sample - loss: 1.9466e-04 - acc: 1.0000
Epoch 238/500
228/228 [==============================] - 0s 257us/sample - loss: 1.8730e-04 - acc: 1.0000
Epoch 239/500
228/228 [==============================] - 0s 263us/sample - loss: 1.8336e-04 - acc: 1.0000
Epoch 240/500
228/228 [==============================] - 0s 262us/sample - loss: 1.8190e-04 - acc: 1.0000
Epoch 241/500
228/228 [==============================] - 0s 259us/sample - loss: 1.8021e-04 - acc: 1.0000
Epoch 242/500
228/228 [==============================] - 0s 257us/sample - loss: 1.7841e-04 - acc: 1.0000
Epoch 243/500
228/228 [==============================] - 0s 245us/sample - loss: 1.7711e-04 - acc: 1.0000
Epoch 244/500
228/228 [==============================] - 0s 248us/sample - loss: 1.7748e-04 - acc: 1.0000
Epoch 245/500
228/228 [==============================] - 0s 255us/sample - loss: 1.7424e-04 - acc: 1.0000
Epoch 246/500
228/228 [==============================] - 0s 252us/sample - loss: 1.7218e-04 - acc: 1.0000
Epoch 247/500
228/228 [==============================] - 0s 257us/sample - loss: 1.6986e-04 - acc: 1.0000
Epoch 248/500
228/228 [==============================] - 0s 254us/sample - loss: 1.6705e-04 - acc: 1.0000
Epoch 249/500
228/228 [==============================] - 0s 260us/sample - loss: 1.6558e-04 - acc: 1.0000
Epoch 250/500
228/228 [==============================] - 0s 253us/sample - loss: 1.6403e-04 - acc: 1.0000
Epoch 251/500
228/228 [==============================] - 0s 261us/sample - loss: 1.6272e-04 - acc: 1.0000
Epoch 252/500
228/228 [==============================] - 0s 262us/sample - loss: 1.6158e-04 - acc: 1.0000
Epoch 253/500
228/228 [==============================] - 0s 258us/sample - loss: 1.5934e-04 - acc: 1.0000
Epoch 254/500
228/228 [==============================] - 0s 252us/sample - loss: 1.5822e-04 - acc: 1.0000
Epoch 255/500
228/228 [==============================] - 0s 255us/sample - loss: 1.5909e-04 - acc: 1.0000
Epoch 256/500
228/228 [==============================] - 0s 268us/sample - loss: 1.7773e-04 - acc: 1.0000
Epoch 257/500
228/228 [==============================] - 0s 255us/sample - loss: 1.5907e-04 - acc: 1.0000
Epoch 258/500
228/228 [==============================] - 0s 267us/sample - loss: 1.5803e-04 - acc: 1.0000
Epoch 259/500
228/228 [==============================] - 0s 311us/sample - loss: 1.5649e-04 - acc: 1.0000
Epoch 260/500
228/228 [==============================] - 0s 286us/sample - loss: 1.5011e-04 - acc: 1.0000
Epoch 261/500
228/228 [==============================] - 0s 271us/sample - loss: 1.5030e-04 - acc: 1.0000
Epoch 262/500
228/228 [==============================] - 0s 285us/sample - loss: 1.4701e-04 - acc: 1.0000
Epoch 263/500
228/228 [==============================] - 0s 283us/sample - loss: 1.4546e-04 - acc: 1.0000
Epoch 264/500
228/228 [==============================] - 0s 285us/sample - loss: 1.4518e-04 - acc: 1.0000
Epoch 265/500
228/228 [==============================] - 0s 283us/sample - loss: 1.4360e-04 - acc: 1.0000
Epoch 266/500
228/228 [==============================] - 0s 285us/sample - loss: 1.4405e-04 - acc: 1.0000
Epoch 267/500
228/228 [==============================] - 0s 285us/sample - loss: 1.4423e-04 - acc: 1.0000
Epoch 268/500
228/228 [==============================] - 0s 298us/sample - loss: 1.4052e-04 - acc: 1.0000
Epoch 269/500
228/228 [==============================] - 0s 264us/sample - loss: 1.3943e-04 - acc: 1.0000
Epoch 270/500
228/228 [==============================] - 0s 262us/sample - loss: 1.4436e-04 - acc: 1.0000
Epoch 271/500
228/228 [==============================] - 0s 267us/sample - loss: 1.3719e-04 - acc: 1.0000
Epoch 272/500
228/228 [==============================] - 0s 258us/sample - loss: 1.3459e-04 - acc: 1.0000
Epoch 273/500
228/228 [==============================] - 0s 266us/sample - loss: 1.3345e-04 - acc: 1.0000
Epoch 274/500
228/228 [==============================] - 0s 264us/sample - loss: 1.3167e-04 - acc: 1.0000
Epoch 275/500
228/228 [==============================] - 0s 266us/sample - loss: 1.3034e-04 - acc: 1.0000
Epoch 276/500
228/228 [==============================] - 0s 262us/sample - loss: 1.2905e-04 - acc: 1.0000
Epoch 277/500
228/228 [==============================] - 0s 262us/sample - loss: 1.3051e-04 - acc: 1.0000
Epoch 278/500
228/228 [==============================] - 0s 264us/sample - loss: 1.2712e-04 - acc: 1.0000
Epoch 279/500
228/228 [==============================] - 0s 264us/sample - loss: 1.2628e-04 - acc: 1.0000
Epoch 280/500
228/228 [==============================] - 0s 265us/sample - loss: 1.2655e-04 - acc: 1.0000
Epoch 281/500
228/228 [==============================] - 0s 260us/sample - loss: 1.2338e-04 - acc: 1.0000
Epoch 282/500
228/228 [==============================] - 0s 253us/sample - loss: 1.2606e-04 - acc: 1.0000
Epoch 283/500
228/228 [==============================] - 0s 252us/sample - loss: 1.2513e-04 - acc: 1.0000
Epoch 284/500
228/228 [==============================] - 0s 258us/sample - loss: 1.2335e-04 - acc: 1.0000
Epoch 285/500
228/228 [==============================] - 0s 264us/sample - loss: 1.2235e-04 - acc: 1.0000
Epoch 286/500
228/228 [==============================] - 0s 262us/sample - loss: 1.1832e-04 - acc: 1.0000
Epoch 287/500
228/228 [==============================] - 0s 256us/sample - loss: 1.1708e-04 - acc: 1.0000
Epoch 288/500
228/228 [==============================] - 0s 249us/sample - loss: 1.1612e-04 - acc: 1.0000
Epoch 289/500
228/228 [==============================] - 0s 252us/sample - loss: 1.1507e-04 - acc: 1.0000
Epoch 290/500
228/228 [==============================] - 0s 250us/sample - loss: 1.1416e-04 - acc: 1.0000
Epoch 291/500
228/228 [==============================] - 0s 263us/sample - loss: 1.1225e-04 - acc: 1.0000
Epoch 292/500
228/228 [==============================] - 0s 258us/sample - loss: 1.1150e-04 - acc: 1.0000
Epoch 293/500
228/228 [==============================] - 0s 264us/sample - loss: 1.1081e-04 - acc: 1.0000
Epoch 294/500
228/228 [==============================] - 0s 263us/sample - loss: 1.1042e-04 - acc: 1.0000
Epoch 295/500
228/228 [==============================] - 0s 259us/sample - loss: 1.0852e-04 - acc: 1.0000
Epoch 296/500
228/228 [==============================] - 0s 265us/sample - loss: 1.0781e-04 - acc: 1.0000
Epoch 297/500
228/228 [==============================] - 0s 266us/sample - loss: 1.0917e-04 - acc: 1.0000
Epoch 298/500
228/228 [==============================] - 0s 273us/sample - loss: 1.1131e-04 - acc: 1.0000
Epoch 299/500
228/228 [==============================] - 0s 270us/sample - loss: 1.0922e-04 - acc: 1.0000
Epoch 300/500
228/228 [==============================] - 0s 260us/sample - loss: 1.0702e-04 - acc: 1.0000
Epoch 301/500
228/228 [==============================] - 0s 265us/sample - loss: 1.0428e-04 - acc: 1.0000
Epoch 302/500
228/228 [==============================] - 0s 266us/sample - loss: 1.0275e-04 - acc: 1.0000
Epoch 303/500
228/228 [==============================] - 0s 264us/sample - loss: 1.0185e-04 - acc: 1.0000
Epoch 304/500
228/228 [==============================] - 0s 255us/sample - loss: 1.0037e-04 - acc: 1.0000
Epoch 305/500
228/228 [==============================] - 0s 261us/sample - loss: 9.9970e-05 - acc: 1.0000
Epoch 306/500
228/228 [==============================] - 0s 263us/sample - loss: 1.0308e-04 - acc: 1.0000
Epoch 307/500
228/228 [==============================] - 0s 254us/sample - loss: 1.0978e-04 - acc: 1.0000
Epoch 308/500
228/228 [==============================] - 0s 255us/sample - loss: 1.0559e-04 - acc: 1.0000
Epoch 309/500
228/228 [==============================] - 0s 251us/sample - loss: 9.8575e-05 - acc: 1.0000
Epoch 310/500
228/228 [==============================] - 0s 255us/sample - loss: 9.5998e-05 - acc: 1.0000
Epoch 311/500
228/228 [==============================] - 0s 255us/sample - loss: 9.4849e-05 - acc: 1.0000
Epoch 312/500
228/228 [==============================] - 0s 255us/sample - loss: 9.3696e-05 - acc: 1.0000
Epoch 313/500
228/228 [==============================] - 0s 263us/sample - loss: 9.3197e-05 - acc: 1.0000
Epoch 314/500
228/228 [==============================] - 0s 266us/sample - loss: 9.2684e-05 - acc: 1.0000
Epoch 315/500
228/228 [==============================] - 0s 263us/sample - loss: 9.1894e-05 - acc: 1.0000
Epoch 316/500
228/228 [==============================] - 0s 262us/sample - loss: 9.0923e-05 - acc: 1.0000
Epoch 317/500
228/228 [==============================] - 0s 263us/sample - loss: 8.9942e-05 - acc: 1.0000
Epoch 318/500
228/228 [==============================] - 0s 266us/sample - loss: 8.9499e-05 - acc: 1.0000
Epoch 319/500
228/228 [==============================] - 0s 257us/sample - loss: 8.8388e-05 - acc: 1.0000
Epoch 320/500
228/228 [==============================] - 0s 271us/sample - loss: 8.8046e-05 - acc: 1.0000
Epoch 321/500
228/228 [==============================] - 0s 263us/sample - loss: 8.8658e-05 - acc: 1.0000
Epoch 322/500
228/228 [==============================] - 0s 260us/sample - loss: 9.0339e-05 - acc: 1.0000
Epoch 323/500
228/228 [==============================] - 0s 265us/sample - loss: 8.7122e-05 - acc: 1.0000
Epoch 324/500
228/228 [==============================] - 0s 265us/sample - loss: 8.5980e-05 - acc: 1.0000
Epoch 325/500
228/228 [==============================] - 0s 265us/sample - loss: 8.4299e-05 - acc: 1.0000
Epoch 326/500
228/228 [==============================] - 0s 265us/sample - loss: 8.4336e-05 - acc: 1.0000
Epoch 327/500
228/228 [==============================] - 0s 268us/sample - loss: 8.2393e-05 - acc: 1.0000
Epoch 328/500
228/228 [==============================] - 0s 263us/sample - loss: 8.1838e-05 - acc: 1.0000
Epoch 329/500
228/228 [==============================] - 0s 257us/sample - loss: 8.1222e-05 - acc: 1.0000
Epoch 330/500
228/228 [==============================] - 0s 259us/sample - loss: 8.0438e-05 - acc: 1.0000
Epoch 331/500
228/228 [==============================] - 0s 251us/sample - loss: 8.0443e-05 - acc: 1.0000
Epoch 332/500
228/228 [==============================] - 0s 251us/sample - loss: 7.9780e-05 - acc: 1.0000
Epoch 333/500
228/228 [==============================] - 0s 268us/sample - loss: 7.9119e-05 - acc: 1.0000
Epoch 334/500
228/228 [==============================] - 0s 264us/sample - loss: 7.8743e-05 - acc: 1.0000
Epoch 335/500
228/228 [==============================] - 0s 260us/sample - loss: 7.9419e-05 - acc: 1.0000
Epoch 336/500
228/228 [==============================] - 0s 254us/sample - loss: 7.7827e-05 - acc: 1.0000
Epoch 337/500
228/228 [==============================] - 0s 243us/sample - loss: 7.7013e-05 - acc: 1.0000
Epoch 338/500
228/228 [==============================] - 0s 246us/sample - loss: 7.5946e-05 - acc: 1.0000
Epoch 339/500
228/228 [==============================] - 0s 256us/sample - loss: 7.4722e-05 - acc: 1.0000
Epoch 340/500
228/228 [==============================] - 0s 265us/sample - loss: 7.4331e-05 - acc: 1.0000
Epoch 341/500
228/228 [==============================] - 0s 265us/sample - loss: 7.4182e-05 - acc: 1.0000
Epoch 342/500
228/228 [==============================] - 0s 262us/sample - loss: 7.3387e-05 - acc: 1.0000
Epoch 343/500
228/228 [==============================] - 0s 253us/sample - loss: 7.2546e-05 - acc: 1.0000
Epoch 344/500
228/228 [==============================] - 0s 263us/sample - loss: 7.2508e-05 - acc: 1.0000
Epoch 345/500
228/228 [==============================] - 0s 260us/sample - loss: 7.2407e-05 - acc: 1.0000
Epoch 346/500
228/228 [==============================] - 0s 258us/sample - loss: 7.1956e-05 - acc: 1.0000
Epoch 347/500
228/228 [==============================] - 0s 257us/sample - loss: 7.0753e-05 - acc: 1.0000
Epoch 348/500
228/228 [==============================] - 0s 256us/sample - loss: 6.9889e-05 - acc: 1.0000
Epoch 349/500
228/228 [==============================] - 0s 247us/sample - loss: 7.2250e-05 - acc: 1.0000
Epoch 350/500
228/228 [==============================] - 0s 255us/sample - loss: 7.1048e-05 - acc: 1.0000
Epoch 351/500
228/228 [==============================] - 0s 262us/sample - loss: 6.8814e-05 - acc: 1.0000
Epoch 352/500
228/228 [==============================] - 0s 268us/sample - loss: 7.0012e-05 - acc: 1.0000
Epoch 353/500
228/228 [==============================] - 0s 260us/sample - loss: 6.9445e-05 - acc: 1.0000
Epoch 354/500
228/228 [==============================] - 0s 262us/sample - loss: 6.7397e-05 - acc: 1.0000
Epoch 355/500
228/228 [==============================] - 0s 264us/sample - loss: 6.6791e-05 - acc: 1.0000
Epoch 356/500
228/228 [==============================] - 0s 266us/sample - loss: 6.5807e-05 - acc: 1.0000
Epoch 357/500
228/228 [==============================] - 0s 267us/sample - loss: 6.4866e-05 - acc: 1.0000
Epoch 358/500
228/228 [==============================] - 0s 268us/sample - loss: 6.4686e-05 - acc: 1.0000
Epoch 359/500
228/228 [==============================] - 0s 271us/sample - loss: 6.3930e-05 - acc: 1.0000
Epoch 360/500
228/228 [==============================] - 0s 268us/sample - loss: 6.3668e-05 - acc: 1.0000
Epoch 361/500
228/228 [==============================] - 0s 261us/sample - loss: 6.3222e-05 - acc: 1.0000
Epoch 362/500
228/228 [==============================] - 0s 270us/sample - loss: 6.2816e-05 - acc: 1.0000
Epoch 363/500
228/228 [==============================] - 0s 266us/sample - loss: 6.2264e-05 - acc: 1.0000
Epoch 364/500
228/228 [==============================] - 0s 265us/sample - loss: 6.2109e-05 - acc: 1.0000
Epoch 365/500
228/228 [==============================] - 0s 270us/sample - loss: 6.1316e-05 - acc: 1.0000
Epoch 366/500
228/228 [==============================] - 0s 281us/sample - loss: 6.0857e-05 - acc: 1.0000
Epoch 367/500
228/228 [==============================] - 0s 307us/sample - loss: 6.0921e-05 - acc: 1.0000
Epoch 368/500
228/228 [==============================] - 0s 281us/sample - loss: 5.9906e-05 - acc: 1.0000
Epoch 369/500
228/228 [==============================] - 0s 272us/sample - loss: 5.9552e-05 - acc: 1.0000
Epoch 370/500
228/228 [==============================] - 0s 255us/sample - loss: 6.0203e-05 - acc: 1.0000
Epoch 371/500
228/228 [==============================] - 0s 256us/sample - loss: 5.8926e-05 - acc: 1.0000
Epoch 372/500
228/228 [==============================] - 0s 253us/sample - loss: 5.8592e-05 - acc: 1.0000
Epoch 373/500
228/228 [==============================] - 0s 253us/sample - loss: 5.8631e-05 - acc: 1.0000
Epoch 374/500
228/228 [==============================] - 0s 258us/sample - loss: 5.9461e-05 - acc: 1.0000
Epoch 375/500
228/228 [==============================] - 0s 257us/sample - loss: 6.4239e-05 - acc: 1.0000
Epoch 376/500
228/228 [==============================] - 0s 261us/sample - loss: 6.0578e-05 - acc: 1.0000
Epoch 377/500
228/228 [==============================] - 0s 265us/sample - loss: 5.6998e-05 - acc: 1.0000
Epoch 378/500
228/228 [==============================] - 0s 264us/sample - loss: 5.5936e-05 - acc: 1.0000
Epoch 379/500
228/228 [==============================] - 0s 263us/sample - loss: 5.5708e-05 - acc: 1.0000
Epoch 380/500
228/228 [==============================] - 0s 258us/sample - loss: 5.4743e-05 - acc: 1.0000
Epoch 381/500
228/228 [==============================] - 0s 261us/sample - loss: 5.5271e-05 - acc: 1.0000
Epoch 382/500
228/228 [==============================] - 0s 263us/sample - loss: 5.4480e-05 - acc: 1.0000
Epoch 383/500
228/228 [==============================] - 0s 271us/sample - loss: 5.5207e-05 - acc: 1.0000
Epoch 384/500
228/228 [==============================] - 0s 263us/sample - loss: 5.3954e-05 - acc: 1.0000
Epoch 385/500
228/228 [==============================] - 0s 274us/sample - loss: 5.4299e-05 - acc: 1.0000
Epoch 386/500
228/228 [==============================] - 0s 274us/sample - loss: 5.4059e-05 - acc: 1.0000
Epoch 387/500
228/228 [==============================] - 0s 269us/sample - loss: 5.5798e-05 - acc: 1.0000
Epoch 388/500
228/228 [==============================] - 0s 265us/sample - loss: 5.5233e-05 - acc: 1.0000
Epoch 389/500
228/228 [==============================] - 0s 267us/sample - loss: 5.2792e-05 - acc: 1.0000
Epoch 390/500
228/228 [==============================] - 0s 265us/sample - loss: 5.1564e-05 - acc: 1.0000
Epoch 391/500
228/228 [==============================] - 0s 266us/sample - loss: 5.0435e-05 - acc: 1.0000
Epoch 392/500
228/228 [==============================] - 0s 265us/sample - loss: 4.9730e-05 - acc: 1.0000
Epoch 393/500
228/228 [==============================] - 0s 260us/sample - loss: 4.9995e-05 - acc: 1.0000
Epoch 394/500
228/228 [==============================] - 0s 259us/sample - loss: 4.9475e-05 - acc: 1.0000
Epoch 395/500
228/228 [==============================] - 0s 260us/sample - loss: 4.9055e-05 - acc: 1.0000
Epoch 396/500
228/228 [==============================] - 0s 264us/sample - loss: 4.8650e-05 - acc: 1.0000
Epoch 397/500
228/228 [==============================] - 0s 258us/sample - loss: 4.7919e-05 - acc: 1.0000
Epoch 398/500
228/228 [==============================] - 0s 261us/sample - loss: 4.7939e-05 - acc: 1.0000
Epoch 399/500
228/228 [==============================] - 0s 262us/sample - loss: 4.7598e-05 - acc: 1.0000
Epoch 400/500
228/228 [==============================] - 0s 257us/sample - loss: 4.7079e-05 - acc: 1.0000
Epoch 401/500
228/228 [==============================] - 0s 259us/sample - loss: 4.6470e-05 - acc: 1.0000
Epoch 402/500
228/228 [==============================] - 0s 255us/sample - loss: 4.6034e-05 - acc: 1.0000
Epoch 403/500
228/228 [==============================] - 0s 262us/sample - loss: 4.5975e-05 - acc: 1.0000
Epoch 404/500
228/228 [==============================] - 0s 261us/sample - loss: 4.5479e-05 - acc: 1.0000
Epoch 405/500
228/228 [==============================] - 0s 258us/sample - loss: 4.4930e-05 - acc: 1.0000
Epoch 406/500
228/228 [==============================] - 0s 265us/sample - loss: 4.4847e-05 - acc: 1.0000
Epoch 407/500
228/228 [==============================] - 0s 257us/sample - loss: 4.4617e-05 - acc: 1.0000
Epoch 408/500
228/228 [==============================] - 0s 264us/sample - loss: 4.4118e-05 - acc: 1.0000
Epoch 409/500
228/228 [==============================] - 0s 265us/sample - loss: 4.3792e-05 - acc: 1.0000
Epoch 410/500
228/228 [==============================] - 0s 262us/sample - loss: 4.3821e-05 - acc: 1.0000
Epoch 411/500
228/228 [==============================] - 0s 258us/sample - loss: 4.3596e-05 - acc: 1.0000
Epoch 412/500
228/228 [==============================] - 0s 249us/sample - loss: 4.3151e-05 - acc: 1.0000
Epoch 413/500
228/228 [==============================] - 0s 257us/sample - loss: 4.3310e-05 - acc: 1.0000
Epoch 414/500
228/228 [==============================] - 0s 265us/sample - loss: 4.2859e-05 - acc: 1.0000
Epoch 415/500
228/228 [==============================] - 0s 268us/sample - loss: 4.3391e-05 - acc: 1.0000
Epoch 416/500
228/228 [==============================] - 0s 265us/sample - loss: 4.1939e-05 - acc: 1.0000
Epoch 417/500
228/228 [==============================] - 0s 264us/sample - loss: 4.1425e-05 - acc: 1.0000
Epoch 418/500
228/228 [==============================] - 0s 263us/sample - loss: 4.1180e-05 - acc: 1.0000
Epoch 419/500
228/228 [==============================] - 0s 262us/sample - loss: 4.0916e-05 - acc: 1.0000
Epoch 420/500
228/228 [==============================] - 0s 263us/sample - loss: 4.0792e-05 - acc: 1.0000
Epoch 421/500
228/228 [==============================] - 0s 267us/sample - loss: 4.0636e-05 - acc: 1.0000
Epoch 422/500
228/228 [==============================] - 0s 269us/sample - loss: 4.1676e-05 - acc: 1.0000
Epoch 423/500
228/228 [==============================] - 0s 267us/sample - loss: 4.3209e-05 - acc: 1.0000
Epoch 424/500
228/228 [==============================] - 0s 257us/sample - loss: 4.1754e-05 - acc: 1.0000
Epoch 425/500
228/228 [==============================] - 0s 263us/sample - loss: 3.9839e-05 - acc: 1.0000
Epoch 426/500
228/228 [==============================] - 0s 271us/sample - loss: 3.9245e-05 - acc: 1.0000
Epoch 427/500
228/228 [==============================] - 0s 261us/sample - loss: 3.8914e-05 - acc: 1.0000
Epoch 428/500
228/228 [==============================] - 0s 259us/sample - loss: 3.8512e-05 - acc: 1.0000
Epoch 429/500
228/228 [==============================] - 0s 264us/sample - loss: 3.8197e-05 - acc: 1.0000
Epoch 430/500
228/228 [==============================] - 0s 264us/sample - loss: 3.8201e-05 - acc: 1.0000
Epoch 431/500
228/228 [==============================] - 0s 259us/sample - loss: 3.7821e-05 - acc: 1.0000
Epoch 432/500
228/228 [==============================] - 0s 251us/sample - loss: 3.7446e-05 - acc: 1.0000
Epoch 433/500
228/228 [==============================] - 0s 269us/sample - loss: 3.7209e-05 - acc: 1.0000
Epoch 434/500
228/228 [==============================] - 0s 269us/sample - loss: 3.6860e-05 - acc: 1.0000
Epoch 435/500
228/228 [==============================] - 0s 264us/sample - loss: 3.6772e-05 - acc: 1.0000
Epoch 436/500
228/228 [==============================] - 0s 264us/sample - loss: 3.6657e-05 - acc: 1.0000
Epoch 437/500
228/228 [==============================] - 0s 263us/sample - loss: 3.6346e-05 - acc: 1.0000
Epoch 438/500
228/228 [==============================] - 0s 269us/sample - loss: 3.6001e-05 - acc: 1.0000
Epoch 439/500
228/228 [==============================] - 0s 263us/sample - loss: 3.5794e-05 - acc: 1.0000
Epoch 440/500
228/228 [==============================] - 0s 259us/sample - loss: 3.5798e-05 - acc: 1.0000
Epoch 441/500
228/228 [==============================] - 0s 265us/sample - loss: 3.5371e-05 - acc: 1.0000
Epoch 442/500
228/228 [==============================] - 0s 266us/sample - loss: 3.5600e-05 - acc: 1.0000
Epoch 443/500
228/228 [==============================] - 0s 263us/sample - loss: 3.5906e-05 - acc: 1.0000
Epoch 444/500
228/228 [==============================] - 0s 257us/sample - loss: 3.4772e-05 - acc: 1.0000
Epoch 445/500
228/228 [==============================] - 0s 262us/sample - loss: 3.4682e-05 - acc: 1.0000
Epoch 446/500
228/228 [==============================] - 0s 272us/sample - loss: 3.4242e-05 - acc: 1.0000
Epoch 447/500
228/228 [==============================] - 0s 272us/sample - loss: 3.4238e-05 - acc: 1.0000
Epoch 448/500
228/228 [==============================] - 0s 262us/sample - loss: 3.3689e-05 - acc: 1.0000
Epoch 449/500
228/228 [==============================] - 0s 308us/sample - loss: 3.4041e-05 - acc: 1.0000
Epoch 450/500
228/228 [==============================] - 0s 271us/sample - loss: 3.3657e-05 - acc: 1.0000
Epoch 451/500
228/228 [==============================] - 0s 267us/sample - loss: 3.3974e-05 - acc: 1.0000
Epoch 452/500
228/228 [==============================] - 0s 274us/sample - loss: 3.4167e-05 - acc: 1.0000
Epoch 453/500
228/228 [==============================] - 0s 274us/sample - loss: 3.3659e-05 - acc: 1.0000
Epoch 454/500
228/228 [==============================] - 0s 258us/sample - loss: 3.2876e-05 - acc: 1.0000
Epoch 455/500
228/228 [==============================] - 0s 256us/sample - loss: 3.2235e-05 - acc: 1.0000
Epoch 456/500
228/228 [==============================] - 0s 274us/sample - loss: 3.1697e-05 - acc: 1.0000
Epoch 457/500
228/228 [==============================] - 0s 281us/sample - loss: 3.2297e-05 - acc: 1.0000
Epoch 458/500
228/228 [==============================] - 0s 260us/sample - loss: 3.1529e-05 - acc: 1.0000
Epoch 459/500
228/228 [==============================] - 0s 259us/sample - loss: 3.1141e-05 - acc: 1.0000
Epoch 460/500
228/228 [==============================] - 0s 279us/sample - loss: 3.1238e-05 - acc: 1.0000
Epoch 461/500
228/228 [==============================] - 0s 258us/sample - loss: 3.0762e-05 - acc: 1.0000
Epoch 462/500
228/228 [==============================] - 0s 271us/sample - loss: 3.0590e-05 - acc: 1.0000
Epoch 463/500
228/228 [==============================] - 0s 278us/sample - loss: 3.0712e-05 - acc: 1.0000
Epoch 464/500
228/228 [==============================] - 0s 269us/sample - loss: 3.0469e-05 - acc: 1.0000
Epoch 465/500
228/228 [==============================] - 0s 272us/sample - loss: 2.9771e-05 - acc: 1.0000
Epoch 466/500
228/228 [==============================] - 0s 265us/sample - loss: 2.9692e-05 - acc: 1.0000
Epoch 467/500
228/228 [==============================] - 0s 254us/sample - loss: 2.9933e-05 - acc: 1.0000
Epoch 468/500
228/228 [==============================] - 0s 257us/sample - loss: 2.9780e-05 - acc: 1.0000
Epoch 469/500
228/228 [==============================] - 0s 258us/sample - loss: 2.9474e-05 - acc: 1.0000
Epoch 470/500
228/228 [==============================] - 0s 261us/sample - loss: 2.9559e-05 - acc: 1.0000
Epoch 471/500
228/228 [==============================] - 0s 263us/sample - loss: 2.9200e-05 - acc: 1.0000
Epoch 472/500
228/228 [==============================] - 0s 259us/sample - loss: 2.8882e-05 - acc: 1.0000
Epoch 473/500
228/228 [==============================] - 0s 261us/sample - loss: 2.8328e-05 - acc: 1.0000
Epoch 474/500
228/228 [==============================] - 0s 255us/sample - loss: 2.8122e-05 - acc: 1.0000
Epoch 475/500
228/228 [==============================] - 0s 263us/sample - loss: 2.7856e-05 - acc: 1.0000
Epoch 476/500
228/228 [==============================] - 0s 267us/sample - loss: 2.7688e-05 - acc: 1.0000
Epoch 477/500
228/228 [==============================] - 0s 271us/sample - loss: 2.7477e-05 - acc: 1.0000
Epoch 478/500
228/228 [==============================] - 0s 267us/sample - loss: 2.7312e-05 - acc: 1.0000
Epoch 479/500
228/228 [==============================] - 0s 283us/sample - loss: 2.7308e-05 - acc: 1.0000
Epoch 480/500
228/228 [==============================] - 0s 257us/sample - loss: 2.7017e-05 - acc: 1.0000
Epoch 481/500
228/228 [==============================] - 0s 268us/sample - loss: 2.6841e-05 - acc: 1.0000
Epoch 482/500
228/228 [==============================] - 0s 270us/sample - loss: 2.6789e-05 - acc: 1.0000
Epoch 483/500
228/228 [==============================] - 0s 267us/sample - loss: 2.6833e-05 - acc: 1.0000
Epoch 484/500
228/228 [==============================] - 0s 263us/sample - loss: 2.6544e-05 - acc: 1.0000
Epoch 485/500
228/228 [==============================] - 0s 260us/sample - loss: 2.6565e-05 - acc: 1.0000
Epoch 486/500
228/228 [==============================] - 0s 261us/sample - loss: 2.6167e-05 - acc: 1.0000
Epoch 487/500
228/228 [==============================] - 0s 261us/sample - loss: 2.6141e-05 - acc: 1.0000
Epoch 488/500
228/228 [==============================] - 0s 266us/sample - loss: 2.5827e-05 - acc: 1.0000
Epoch 489/500
228/228 [==============================] - 0s 364us/sample - loss: 2.5601e-05 - acc: 1.0000
Epoch 490/500
228/228 [==============================] - 0s 317us/sample - loss: 2.5358e-05 - acc: 1.0000
Epoch 491/500
228/228 [==============================] - 0s 266us/sample - loss: 2.5144e-05 - acc: 1.0000
Epoch 492/500
228/228 [==============================] - 0s 264us/sample - loss: 2.5163e-05 - acc: 1.0000
Epoch 493/500
228/228 [==============================] - 0s 262us/sample - loss: 2.4917e-05 - acc: 1.0000
Epoch 494/500
228/228 [==============================] - 0s 269us/sample - loss: 2.4707e-05 - acc: 1.0000
Epoch 495/500
228/228 [==============================] - 0s 262us/sample - loss: 2.4688e-05 - acc: 1.0000
Epoch 496/500
228/228 [==============================] - 0s 270us/sample - loss: 2.4535e-05 - acc: 1.0000
Epoch 497/500
228/228 [==============================] - 0s 265us/sample - loss: 2.4472e-05 - acc: 1.0000
Epoch 498/500
228/228 [==============================] - 0s 268us/sample - loss: 2.4872e-05 - acc: 1.0000
Epoch 499/500
228/228 [==============================] - 0s 268us/sample - loss: 2.4630e-05 - acc: 1.0000
Epoch 500/500
228/228 [==============================] - 0s 270us/sample - loss: 2.4183e-05 - acc: 1.0000
Out[37]:
<tensorflow.python.keras.callbacks.History at 0x7f815ac63890>
In [38]:
# Evaluate the trained network on the held-out test set.
# NOTE(review): `model`, `test_images`, and `test_labels` are defined in
# earlier cells (the Out[37] History above comes from the preceding
# model.fit call) — this cell only works after those cells have run.
model.evaluate(test_images, test_labels)
24/24 [==============================] - 0s 3ms/sample - loss: 0.0432 - acc: 0.9583
Out[38]:
[0.043238889425992966, 0.9583333]
In [ ]:
 
In [ ]:
 

ARC & Cepstrum (Ceps) feature methods below: these performed poorly compared to the MAV features above, kept for reference only.

import tensorflow as tf
from tensorflow import keras

model = keras.Sequential([
    keras.layers.Flatten(input_shape=(8,8)),
    keras.layers.Dense(10240, activation = tf.nn.relu),
    keras.layers.Dense(7, activation = tf.nn.softmax)
])

model.compile(optimizer = tf.train.AdamOptimizer(),
              loss = 'sparse_categorical_crossentropy',
              metrics=['accuracy'])

model.fit(training_ARC_images, training_ARC_labels, epochs = 500)

model.evaluate(test_ARC_images, test_ARC_labels)

import tensorflow as tf
from tensorflow import keras

model = keras.Sequential([
    keras.layers.Flatten(input_shape=(8,8)),
    keras.layers.Dense(10240, activation = tf.nn.relu),
    keras.layers.Dense(7, activation = tf.nn.softmax)
])

model.compile(optimizer = tf.train.AdamOptimizer(),
              loss = 'sparse_categorical_crossentropy',
              metrics=['accuracy'])

model.fit(training_Ceps_images, training_Ceps_labels, epochs = 500)

model.evaluate(test_Ceps_images, test_Ceps_labels)

In [ ]:
 
In [ ]: